[ 539.992070] env[69994]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69994) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 539.992523] env[69994]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69994) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 539.992523] env[69994]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69994) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 539.992848] env[69994]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 540.091616] env[69994]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69994) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 540.101815] env[69994]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69994) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 540.145286] env[69994]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 540.703988] env[69994]: INFO nova.virt.driver [None req-04a15d84-1dbf-493e-bed7-9b70a0c45746 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 540.774848] env[69994]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 540.775024] env[69994]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 540.775122] env[69994]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69994) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 543.681072] env[69994]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-04996455-3617-4847-a3b9-3c47f15a8f85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.697602] env[69994]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69994) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 543.697734] env[69994]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-755e1530-a8a7-4106-9746-f48b338c8a1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.723271] env[69994]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 2b06d.
[ 543.723383] env[69994]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.948s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 543.723873] env[69994]: INFO nova.virt.vmwareapi.driver [None req-04a15d84-1dbf-493e-bed7-9b70a0c45746 None None] VMware vCenter version: 7.0.3
[ 543.727406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872c4256-4e11-4b1d-aca0-032ff3fa57a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.744263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86dcf65-d68d-4a60-b8a3-be3e2cbc9b19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.749977] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5803bb10-9d20-4143-ab1a-5cd11f420583 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.756585] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a218644d-9fa3-4a8f-a145-a823f9dd24fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.769372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d74ceb1-79e7-4a08-a592-b9214670ad96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.775087] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71406d06-d14b-44b4-9631-eca28c3e9142 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.805269] env[69994]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-b0ad99d7-8944-4114-8f08-b673fd4686c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 543.810131] env[69994]: DEBUG nova.virt.vmwareapi.driver [None req-04a15d84-1dbf-493e-bed7-9b70a0c45746 None None] Extension org.openstack.compute already exists. {{(pid=69994) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 543.812747] env[69994]: INFO nova.compute.provider_config [None req-04a15d84-1dbf-493e-bed7-9b70a0c45746 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 544.316530] env[69994]: DEBUG nova.context [None req-04a15d84-1dbf-493e-bed7-9b70a0c45746 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),3e6d95a2-b5e7-4f4e-ba2c-ca1211848c5a(cell1) {{(pid=69994) load_cells /opt/stack/nova/nova/context.py:464}}
[ 544.318654] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 544.318883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 544.319612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 544.320061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Acquiring lock "3e6d95a2-b5e7-4f4e-ba2c-ca1211848c5a" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 544.320257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Lock "3e6d95a2-b5e7-4f4e-ba2c-ca1211848c5a" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 544.321313] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Lock "3e6d95a2-b5e7-4f4e-ba2c-ca1211848c5a" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 544.341678] env[69994]: INFO dbcounter [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Registered counter for database nova_cell0
[ 544.350255] env[69994]: INFO dbcounter [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Registered counter for database nova_cell1
[ 544.795932] env[69994]: DEBUG oslo_db.sqlalchemy.engines [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69994) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 544.796403] env[69994]: DEBUG oslo_db.sqlalchemy.engines [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69994) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 544.801449] env[69994]: ERROR nova.db.main.api [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 544.801449] env[69994]: result = function(*args, **kwargs)
[ 544.801449] env[69994]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 544.801449] env[69994]: return func(*args, **kwargs)
[ 544.801449] env[69994]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 544.801449] env[69994]: result = fn(*args, **kwargs)
[ 544.801449] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 544.801449] env[69994]: return f(*args, **kwargs)
[ 544.801449] env[69994]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 544.801449] env[69994]: return db.service_get_minimum_version(context, binaries)
[ 544.801449] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 544.801449] env[69994]: _check_db_access()
[ 544.801449] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 544.801449] env[69994]: stacktrace = ''.join(traceback.format_stack())
[ 544.801449] env[69994]:
[ 544.802284] env[69994]: ERROR nova.db.main.api [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 544.802284] env[69994]: result = function(*args, **kwargs)
[ 544.802284] env[69994]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 544.802284] env[69994]: return func(*args, **kwargs)
[ 544.802284] env[69994]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 544.802284] env[69994]: result = fn(*args, **kwargs)
[ 544.802284] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 544.802284] env[69994]: return f(*args, **kwargs)
[ 544.802284] env[69994]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 544.802284] env[69994]: return db.service_get_minimum_version(context, binaries)
[ 544.802284] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 544.802284] env[69994]: _check_db_access()
[ 544.802284] env[69994]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 544.802284] env[69994]: stacktrace = ''.join(traceback.format_stack())
[ 544.802284] env[69994]:
[ 544.802709] env[69994]: WARNING nova.objects.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 544.802817] env[69994]: WARNING nova.objects.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Failed to get minimum service version for cell 3e6d95a2-b5e7-4f4e-ba2c-ca1211848c5a
[ 544.803355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Acquiring lock "singleton_lock" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 544.803524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Acquired lock "singleton_lock" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [
544.803796] env[69994]: DEBUG oslo_concurrency.lockutils [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Releasing lock "singleton_lock" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 544.804149] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Full set of CONF: {{(pid=69994) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 544.804297] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ******************************************************************************** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 544.804426] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] Configuration options gathered from: {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 544.804563] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 544.804765] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 544.804905] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ================================================================================ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 544.805126] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] allow_resize_to_same_host = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.805302] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] arq_binding_timeout = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.805475] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] backdoor_port = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.805618] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] backdoor_socket = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.805817] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] block_device_allocate_retries = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.805989] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] block_device_allocate_retries_interval = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.806179] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cert = self.pem {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.806348] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.806515] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute_monitors = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.806684] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] config_dir = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.807093] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] config_drive_format = iso9660 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.807244] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.807420] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] config_source = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.807593] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] console_host = devstack {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.807763] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] control_exchange = nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.807923] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cpu_allocation_ratio = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.808103] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] daemon = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.808278] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] debug = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.808437] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] default_access_ip_network_name = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.808603] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] default_availability_zone = nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.808790] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] default_ephemeral_format = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.808970] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] default_green_pool_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.809233] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.809402] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] default_schedule_zone = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.809564] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] disk_allocation_ratio = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.809724] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] enable_new_services = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.809903] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] enabled_apis = ['osapi_compute'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.810079] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] enabled_ssl_apis = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.810243] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] flat_injected = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.810402] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] force_config_drive = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.810563] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] force_raw_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.810729] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] graceful_shutdown_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.810894] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] heal_instance_info_cache_interval = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.811138] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] host = cpu-1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.811321] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.811487] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] initial_disk_allocation_ratio = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.811649] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] initial_ram_allocation_ratio = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.811871] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.812055] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instance_build_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.812221] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instance_delete_interval = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.812388] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instance_format = [instance: %(uuid)s] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.812552] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instance_name_template = instance-%08x {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.812712] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instance_usage_audit = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.812912] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instance_usage_audit_period = month {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.813099] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.813268] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] instances_path = /opt/stack/data/nova/instances {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.813434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] internal_service_availability_zone = internal {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.813612] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] key = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.813785] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] live_migration_retry_count = 30 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.813958] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_color = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.814138] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_config_append = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.814305] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.814466] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_dir = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.814621] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.814747] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_options = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.814910] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_rotate_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.815089] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_rotate_interval_type = days {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.815260] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] log_rotation_type = none {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.815391] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.815516] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.815706] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.815891] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.816035] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.816204] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] long_rpc_timeout = 1800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.816365] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] max_concurrent_builds = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.816524] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] max_concurrent_live_migrations = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.816681] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] max_concurrent_snapshots = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.816859] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] max_local_block_devices = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.817034] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] max_logfile_count = 30 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.817200] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] max_logfile_size_mb = 200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.817358] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] maximum_instance_delete_attempts = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.817527] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] metadata_listen = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.817693] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] metadata_listen_port = 8775 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.817862] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] metadata_workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.818052] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] migrate_max_retries = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.818240] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] mkisofs_cmd = genisoimage {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.818447] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] my_block_storage_ip = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.818580] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] my_ip = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.818785] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.818952] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] network_allocate_retries = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.819147] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.819317] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] osapi_compute_listen = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.819480] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] osapi_compute_listen_port = 8774 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.819646] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] osapi_compute_unique_server_name_scope = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.819815] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] osapi_compute_workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.819976] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] password_length = 12 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.820149] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] periodic_enable = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.820308] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] periodic_fuzzy_delay = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.820474] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] pointer_model = usbtablet {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.820638] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] preallocate_images = none {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.820795] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] publish_errors = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.820925] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] pybasedir = /opt/stack/nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.821094] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ram_allocation_ratio = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.821258] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] rate_limit_burst = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.821424] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] rate_limit_except_level = CRITICAL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.821582] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] rate_limit_interval = 0 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.821738] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] reboot_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.821895] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] reclaim_instance_interval = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.822064] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] record = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.822232] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] reimage_timeout_per_gb = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.822399] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] report_interval = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.822561] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] rescue_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.822721] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] reserved_host_cpus = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.822883] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] reserved_host_disk_mb = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.823054] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] reserved_host_memory_mb = 512 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.823275] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] reserved_huge_pages = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.823463] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] resize_confirm_window = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.823651] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] resize_fs_using_block_device = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.823823] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] resume_guests_state_on_host_boot = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.823997] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.824182] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] rpc_response_timeout = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.824344] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] run_external_periodic_tasks = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.824515] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] running_deleted_instance_action = reap {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.824675] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] running_deleted_instance_poll_interval = 1800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.824837] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] running_deleted_instance_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.824994] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler_instance_sync_interval = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.825180] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_down_time = 720 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.825350] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] servicegroup_driver = db {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.825506] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] shell_completion = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.825696] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] shelved_offload_time = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.825860] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] shelved_poll_interval = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.826047] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] shutdown_timeout = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.826216] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] source_is_ipv6 = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.826375] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ssl_only = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.826630] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.826818] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] sync_power_state_interval = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.826995] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] sync_power_state_pool_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.827181] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] syslog_log_facility = LOG_USER {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.827340] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] tempdir = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.827502] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] timeout_nbd = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.827672] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] transport_url = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.827835] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] update_resources_interval = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.827996] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] use_cow_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.828170] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] use_journal = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.828327] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] use_json = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.828486] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] use_rootwrap_daemon = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.828647] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] 
use_stderr = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.828804] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] use_syslog = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.828961] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vcpu_pin_set = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.829144] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plugging_is_fatal = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.829313] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plugging_timeout = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.829480] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] virt_mkfs = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.829641] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] volume_usage_poll_interval = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.829800] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] watch_log_file = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.829967] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] web = /usr/share/spice-html5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 544.830169] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.830336] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.830499] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.830668] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_concurrency.disable_process_locking = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.830964] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.831171] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.831341] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.831513] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.831687] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.831856] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.832059] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.auth_strategy = keystone {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.832233] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.compute_link_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.832409] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.832583] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.dhcp_domain = novalocal {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.832754] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.enable_instance_password = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.832919] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.glance_link_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.833099] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.833278] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.833443] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.instance_list_per_project_cells = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.833635] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.list_records_by_skipping_down_cells = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.833809] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.local_metadata_per_cell = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.833982] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.max_limit = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.834165] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.metadata_cache_expiration = 15 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.834345] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.neutron_default_tenant_id = default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.834517] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.response_validation = warn {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.834690] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.use_neutron_default_nets = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.834859] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.835033] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.835207] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.835380] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.835550] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.vendordata_dynamic_targets = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.835744] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.vendordata_jsonfile_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.835942] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.836155] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.backend = dogpile.cache.memcached {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.836327] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.backend_argument = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.836490] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.backend_expiration_time = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.836678] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.config_prefix = cache.oslo {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.836887] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.dead_timeout = 60.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.837080] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.debug_cache_backend = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.837252] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.enable_retry_client = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.837417] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.enable_socket_keepalive = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.837589] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.enabled = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.837755] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.enforce_fips_mode = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.837920] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.expiration_time = 600 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.838099] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.hashclient_retry_attempts = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.838271] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.hashclient_retry_delay = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.838436] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_dead_retry = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.838595] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.838762] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.838925] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.839102] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_pool_maxsize = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.839271] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.839434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_sasl_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.839614] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.839783] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_socket_timeout = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.839946] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.memcache_username = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.840127] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.proxies = [] {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.840292] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.redis_db = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.840453] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.redis_password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.840625] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.redis_sentinel_service_name = mymaster {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.840801] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.840973] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.redis_server = localhost:6379 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.841156] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.redis_socket_timeout = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.841317] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.redis_username = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.841481] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.retry_attempts = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.841647] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.retry_delay = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.841811] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.socket_keepalive_count = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.841973] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.socket_keepalive_idle = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.842149] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.socket_keepalive_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.842309] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.tls_allowed_ciphers = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.842468] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.tls_cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.842624] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.tls_certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.842784] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.tls_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.842943] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cache.tls_keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.843125] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.843311] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.843598] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.843809] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.catalog_info = volumev3::publicURL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.843982] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.844181] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.844387] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.cross_az_attach = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.844556] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.debug = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.844720] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.endpoint_template = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.844888] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.http_retries = 3 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.845060] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.845224] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.845397] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.os_region_name = RegionOne {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.845562] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.845746] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cinder.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.845929] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.846104] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.cpu_dedicated_set = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.846266] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.cpu_shared_set = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.846432] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.image_type_exclude_list = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.846595] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.846771] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.max_concurrent_disk_ops = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.846952] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.max_disk_devices_to_attach = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.847151] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.847382] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.847559] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.resource_provider_association_refresh = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.847735] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.847898] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.shutdown_retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.848099] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.848284] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] conductor.workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.848465] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] console.allowed_origins = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.848627] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] console.ssl_ciphers = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.848800] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] console.ssl_minimum_version = default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.848971] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] consoleauth.enforce_session_timeout = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.849157] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] consoleauth.token_ttl = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.849332] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.849493] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.certfile = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.849657] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.849819] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.849978] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.850153] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.850320] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.850479] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.850641] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.850803] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.850967] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.851143] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.851303] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.851473] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.service_type = accelerator {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.851638] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.851798] env[69994]: DEBUG oslo_service.backend.eventlet.service 
[None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.851960] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.852134] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.852318] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.852482] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] cyborg.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.852654] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.asyncio_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.852818] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.asyncio_slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.852990] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.backend = sqlalchemy {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.853177] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.853346] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.connection_debug = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.853518] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.connection_parameters = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.853686] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.connection_recycle_time = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.853851] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.connection_trace = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.854021] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.db_inc_retry_interval = 
True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.854191] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.db_max_retries = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.854354] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.db_max_retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.854514] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.db_retry_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.854674] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.max_overflow = 50 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.854836] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.max_pool_size = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.854996] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.max_retries = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.855182] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.855343] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.mysql_wsrep_sync_wait = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.855501] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.pool_timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.855704] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.855861] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.856038] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.sqlite_synchronous = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.856205] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] database.use_db_reconnect = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
544.856375] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.asyncio_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.856536] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.asyncio_slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.856749] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.backend = sqlalchemy {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.857070] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.857382] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.connection_debug = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.857707] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.connection_parameters = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.858091] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.connection_recycle_time = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.858267] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.connection_trace = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.858453] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.db_inc_retry_interval = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.858627] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.db_max_retries = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.858798] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.db_max_retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.858968] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.db_retry_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.859151] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.max_overflow = 50 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.859317] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.max_pool_size = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.859481] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.max_retries = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.859655] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.859818] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.859977] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.pool_timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.860154] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.retry_interval = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.860314] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.slave_connection = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.860475] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] api_database.sqlite_synchronous = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.860652] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] devices.enabled_mdev_types = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.860833] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.861012] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ephemeral_storage_encryption.default_format = luks {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.861185] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ephemeral_storage_encryption.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.861347] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.861519] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.api_servers = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.861685] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.861847] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.862019] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.862182] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.862342] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.862505] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.debug = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.862673] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.default_trusted_certificate_ids = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.862835] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.enable_certificate_validation = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.862996] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.enable_rbd_download = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.863170] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.863337] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.863499] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.863659] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.max_version = None {{(pid=69994) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.863819] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.863982] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.num_retries = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.864176] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.rbd_ceph_conf = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.864335] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.rbd_connect_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.864505] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.rbd_pool = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.864672] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.rbd_user = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.864915] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.865137] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.865383] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.865581] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.service_type = image {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.865784] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.866022] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.866254] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.866473] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.866741] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.866878] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.verify_glance_signatures = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.867120] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] glance.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.867307] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] guestfs.debug = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.867481] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.867651] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.867816] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.867989] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.868192] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.868359] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.868521] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.868680] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.868847] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.insecure = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.869015] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.869183] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.869352] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.869567] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.869741] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.869904] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.870091] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.service_type = shared-file-system {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.870262] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.share_apply_policy_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.870426] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.870586] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.870746] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.870908] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.871210] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.871402] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] manila.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.871576] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] mks.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.871923] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.872158] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] image_cache.manager_interval = 2400 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.872354] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] image_cache.precache_concurrency = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.872532] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] image_cache.remove_unused_base_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.872703] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.872874] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.873063] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] image_cache.subdirectory_name = _base {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.873244] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.api_max_retries = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.873410] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.api_retry_interval = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.873572] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.873737] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.873898] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.874070] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.874314] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.874515] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.conductor_group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.874683] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.874847] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.875023] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.875184] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.875345] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.875505] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.875684] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.875862] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.peer_list = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.876043] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.876210] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.retriable_status_codes = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.876373] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.serial_console_state_timeout = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.876533] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.876704] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.service_type = baremetal {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.876889] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.shard = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.877072] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.877236] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.877397] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.877554] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.877738] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.877901] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ironic.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.878122] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.878309] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] key_manager.fixed_key = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.878493] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.878657] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.barbican_api_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.878820] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.barbican_endpoint = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.879022] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.barbican_endpoint_type = public {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.879210] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.barbican_region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.879372] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.879533] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.879698] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.879860] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.880029] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.880201] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.number_of_retries = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.880362] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.retry_delay = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.880525] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.send_service_user_token = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.880686] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.880843] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.881010] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.verify_ssl = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.881177] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican.verify_ssl_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.881342] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.881535] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.881702] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.881861] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.882034] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.882200] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.882357] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.882519] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.882677] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] barbican_service_user.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.882842] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.approle_role_id = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.882999] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.approle_secret_id = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.883209] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.kv_mountpoint = secret {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.883393] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.kv_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.883560] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.kv_version = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.883722] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.namespace = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.883882] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.root_token_id = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.884052] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.ssl_ca_crt_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.884227] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.timeout = 60.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.884391] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.use_ssl = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.884563] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.884733] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.884894] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.885070] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.885233] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.connect_retries = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.885393] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.885552] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.885743] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.885912] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.886086] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.886248] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.886442] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.886614] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.886799] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.886977] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.service_type = identity {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.887156] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.887316] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.887475] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.887632] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.887812] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.887977] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] keystone.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.888187] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.ceph_mount_options = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.888589] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.888850] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.connection_uri = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.889061] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.cpu_mode = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.889242] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.cpu_model_extra_flags = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.889414] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.cpu_models = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.889588] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.cpu_power_governor_high = performance {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.889767] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.cpu_power_governor_low = powersave {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.889945] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.cpu_power_management = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.890136] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.890372] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.device_detach_attempts = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.890557] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.device_detach_timeout = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.890731] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.disk_cachemodes = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.890896] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.disk_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.891076] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.enabled_perf_events = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.891246] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.file_backed_memory = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.891412] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.gid_maps = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.891573] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.hw_disk_discard = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.891732] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.hw_machine_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.891906] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.images_rbd_ceph_conf = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.892085] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.892252] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.892420] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.images_rbd_glance_store_name = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.892588] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.images_rbd_pool = rbd 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.892765] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.images_type = default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.892978] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.images_volume_group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.893167] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.inject_key = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.893333] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.inject_partition = -2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.893498] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.inject_password = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.893665] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.iscsi_iface = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.893833] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.iser_use_multipath = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.893997] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_bandwidth = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.894176] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.894339] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_downtime = 500 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.894502] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.894663] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.894830] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_inbound_addr = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.894992] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.898568] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_permit_post_copy = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.898797] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_scheme = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.899047] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_timeout_action = abort {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.899268] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_tunnelled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.899455] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_uri = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.899627] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.live_migration_with_native_tls = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.899807] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.max_queues = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.900082] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.900341] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.900558] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.nfs_mount_options = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.900872] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.901071] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69994) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.901245] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.num_iser_scan_tries = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.901412] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.num_memory_encrypted_guests = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.901578] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.901748] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.num_pcie_ports = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.901920] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.num_volume_scan_tries = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.902106] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.pmem_namespaces = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.902275] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.quobyte_client_cfg = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.902572] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.902761] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rbd_connect_timeout = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.902934] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.903115] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.903283] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rbd_secret_uuid = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.903447] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rbd_user = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.903639] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.903832] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.remote_filesystem_transport = ssh {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.904014] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rescue_image_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.904181] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rescue_kernel_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.904339] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rescue_ramdisk_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.904508] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.904671] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.rx_queue_size = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.904872] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.smbfs_mount_options = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.905154] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.905346] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.snapshot_compression = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.905513] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.snapshot_image_format = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.905769] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.905950] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.sparse_logical_volumes = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.906132] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.swtpm_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.906306] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.swtpm_group = tss {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.906478] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.swtpm_user = tss {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.906652] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.sysinfo_serial = unique {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.906813] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.tb_cache_size = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.906975] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.tx_queue_size = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.907158] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.uid_maps = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.907324] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.use_virtio_for_bridges = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.907499] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.virt_type = kvm {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.907670] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.volume_clear = zero {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.907837] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.volume_clear_size = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.908020] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.volume_enforce_multipath = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.908184] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.volume_use_multipath = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.908347] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.vzstorage_cache_path = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.908517] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.908686] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.vzstorage_mount_group = qemu {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.908852] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.vzstorage_mount_opts = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.909033] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.909326] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.909509] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.vzstorage_mount_user = stack {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.909680] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.909855] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.910041] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.910208] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.910369] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.910536] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.910696] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.910859] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.911041] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.default_floating_pool = public {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.911210] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.911374] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.extension_sync_interval = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.911541] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.http_retries = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.911708] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.911872] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.912070] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.912268] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.912482] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.912677] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.ovs_bridge = br-int {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.912882] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.physnets = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.913109] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.region_name = RegionOne 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.913322] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.913526] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.service_metadata_proxy = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.913696] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.913873] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.service_type = network {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.914052] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.914219] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.914382] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.914543] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.914724] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.914888] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] neutron.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.915072] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] notifications.bdms_in_notifications = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.915255] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] notifications.default_level = INFO {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.915422] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] notifications.include_share_mapping = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.915606] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] notifications.notification_format = unversioned {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.915817] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] notifications.notify_on_state_change = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.916012] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.916204] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] pci.alias = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.916376] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] pci.device_spec = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.916542] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] pci.report_in_placement = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.916733] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.916919] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.917104] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.917270] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.917429] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.917592] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.917752] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.connect_retries = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.917915] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.918085] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.default_domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.918247] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.default_domain_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.918405] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.918565] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.domain_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.918721] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.endpoint_override = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.918883] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.919055] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.919219] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.919376] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.919546] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.919705] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.project_domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.919874] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.project_domain_name = Default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.920075] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.project_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.920308] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.project_name = service {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.920488] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.region_name = RegionOne {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.920655] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.920831] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.921013] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.service_type = placement {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.921195] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.921355] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.status_code_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.921517] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.921677] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.system_scope = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.921836] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.921993] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.trust_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.922168] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.user_domain_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.922336] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] 
placement.user_domain_name = Default {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.922495] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.user_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.922669] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.username = nova {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.922850] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.923022] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] placement.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.923214] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.cores = 20 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.923382] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.count_usage_from_placement = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.923564] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.923754] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.injected_file_content_bytes = 10240 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.923925] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.injected_file_path_length = 255 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.924104] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.injected_files = 5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.924275] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.instances = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.924442] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.key_pairs = 100 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.924608] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.metadata_items = 128 {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.924780] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.ram = 51200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.924944] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.recheck_quota = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.925124] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.server_group_members = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.925293] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.server_groups = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.925503] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.925696] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] quota.unified_limits_resource_strategy = require {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.925895] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.926078] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.926246] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.image_metadata_prefilter = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.926410] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.926576] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.max_attempts = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.926770] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.max_placement_results = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.926947] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.927127] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.query_placement_for_image_type_support = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.927292] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.927465] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] scheduler.workers = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.927658] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.927855] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.928060] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.928240] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.928409] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.928576] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.928741] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.928933] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.929114] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.929283] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.929438] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.929602] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.929766] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.929943] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.930123] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.isolated_hosts = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.930287] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.isolated_images = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.930448] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.930607] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.930769] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.930930] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.pci_in_placement = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.931107] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.931272] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.931434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.931592] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.931753] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.931914] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.932088] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.track_instance_changes = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.932269] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.932439] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] metrics.required = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.932604] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] metrics.weight_multiplier = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.932766] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.932930] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] metrics.weight_setting = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.933268] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.933447] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] serial_console.enabled = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.933644] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] serial_console.port_range = 10000:20000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.933828] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.934011] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.934189] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] serial_console.serialproxy_port = 6083 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.934356] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.934530] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.auth_type = password {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.934689] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.934848] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.935014] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.935181] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.935341] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.935512] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.send_service_user_token = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.935697] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.split_loggers = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.935867] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] service_user.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.936062] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.agent_enabled = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.936232] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.936544] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.936766] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.936945] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.html5proxy_port = 6082 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.937122] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.image_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.937286] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.jpeg_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.937444] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.playback_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.937607] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.require_secure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.937778] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.server_listen = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.937949] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.938251] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.938426] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.streaming_mode = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.938588] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] spice.zlib_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.938778] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] upgrade_levels.baseapi = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.938965] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] upgrade_levels.compute = auto {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.939145] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] upgrade_levels.conductor = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.939309] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] upgrade_levels.scheduler = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.939478] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.939645] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.939839] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.940018] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.940197] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.940359] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.940521] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.keyfile = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.940685] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.940844] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vendordata_dynamic_auth.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.941021] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.api_retry_count = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.941188] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.ca_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.941361] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.cache_prefix = devstack-image-cache {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.941530] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.cluster_name = testcl1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.941696] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.connection_pool_size = 10 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.941858] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.console_delay_seconds = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.942038] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.datastore_regex = ^datastore.* {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.942258] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.942434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.host_password = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.942602] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.host_port = 443 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.942772] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.host_username = administrator@vsphere.local {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.942941] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.insecure = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.943119] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.integration_bridge = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.943286] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.maximum_objects = 100 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.943445] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.pbm_default_policy = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.943624] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.pbm_enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.943794] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.pbm_wsdl_location = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.943968] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.944144] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.serial_port_proxy_uri = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.944304] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.serial_port_service_uri = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.944471] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.task_poll_interval = 0.5 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.944645] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.use_linked_clone = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.944815] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.vnc_keymap = en-us {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.944984] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.vnc_port = 5900 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.945161] env[69994]: DEBUG 
oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vmware.vnc_port_total = 10000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.945345] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.auth_schemes = ['none'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.945521] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.945838] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.946043] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.946221] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.novncproxy_port = 6080 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.946421] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.server_listen = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.946606] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.946810] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.vencrypt_ca_certs = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.946985] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.vencrypt_client_cert = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.947163] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vnc.vencrypt_client_key = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.947345] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.947509] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.disable_deep_image_inspection = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.947671] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.947833] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.947998] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.948176] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.disable_rootwrap = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.948338] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.enable_numa_live_migration = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.948500] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.948661] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.948892] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.949105] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.libvirt_disable_apic = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.949278] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.949444] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.949784] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.950008] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.950195] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.950364] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.950530] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.950694] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.950860] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.951038] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.951230] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.951404] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.client_socket_timeout = 900 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.951589] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.default_pool_size = 1000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.951792] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.keep_alive = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.951968] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.max_header_line = 16384 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.952153] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.secure_proxy_ssl_header = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.952320] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.ssl_ca_file = None 
{{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.952484] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.ssl_cert_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.952645] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.ssl_key_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.952811] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.tcp_keepidle = 600 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.952993] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.953183] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] zvm.ca_file = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.953347] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] zvm.cloud_connector_url = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.953632] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.953811] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] zvm.reachable_timeout = 300 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.953989] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.954187] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.954367] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.connection_string = messaging:// {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.954536] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.enabled = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.954708] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] 
profiler.es_doc_type = notification {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.954877] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.es_scroll_size = 10000 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.955058] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.es_scroll_time = 2m {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.955226] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.filter_error_trace = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.955394] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.hmac_keys = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.955562] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.sentinel_service_name = mymaster {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.955756] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.socket_timeout = 0.1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.955930] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.trace_requests = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.956109] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler.trace_sqlalchemy = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.956295] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler_jaeger.process_tags = {} {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.956457] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler_jaeger.service_name_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.956621] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] profiler_otlp.service_name_prefix = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.956788] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] remote_debug.host = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.956949] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] remote_debug.port = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.957138] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.957303] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.957464] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.957625] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.957849] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.958045] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.958216] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.958380] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.958544] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.958716] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.958877] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.959063] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.959237] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.959406] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.959575] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.959749] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.959917] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.960093] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.960270] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.960434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.960597] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.960763] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.960990] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.961280] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.961557] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69994) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.961820] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.962123] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.962396] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.962640] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.962915] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.963173] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.ssl = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.963391] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.963591] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.963740] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.963913] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.964098] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.ssl_version = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.964267] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.964455] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.964623] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_notifications.retry = -1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.964802] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.964973] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_messaging_notifications.transport_url = **** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.965162] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.auth_section = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.965327] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.auth_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.965508] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.cafile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.965733] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.certfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.965919] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.collect_timing = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.966098] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.connect_retries = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.966265] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.connect_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.966427] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.endpoint_id = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.966601] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.endpoint_interface = publicURL {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.966789] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.endpoint_override = 
None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.966959] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.endpoint_region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.967134] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.endpoint_service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.967296] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.endpoint_service_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.967460] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.insecure = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.967620] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.keyfile = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.967778] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.max_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.967938] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.min_version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.968113] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.region_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.968275] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.retriable_status_codes = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.968434] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.service_name = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.968594] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.service_type = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.968755] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.split_loggers = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.968917] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.status_code_retries = None {{(pid=69994) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.969092] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.status_code_retry_delay = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.969255] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.timeout = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.969415] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.valid_interfaces = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.969625] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_limit.version = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.969814] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_reports.file_event_handler = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.969984] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.970167] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] oslo_reports.log_dir = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.970338] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.970499] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.970659] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.970828] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.970992] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.971168] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.971337] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.971497] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_ovs_privileged.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.971685] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.971864] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.972041] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.972205] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] vif_plug_ovs_privileged.user = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.972377] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.flat_interface = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.972559] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.972734] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.972907] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.973092] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.973277] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.973486] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.973708] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.973904] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.974097] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_ovs.isolate_vif = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.974270] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.974437] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.974607] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.974784] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_ovs.ovsdb_interface = native {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.974948] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] os_vif_ovs.per_port_bridge = False {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.975134] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] privsep_osbrick.capabilities = [21] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.975297] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] privsep_osbrick.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.975457] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] privsep_osbrick.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.975643] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.975850] env[69994]: DEBUG oslo_service.backend.eventlet.service [None 
req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.976033] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] privsep_osbrick.user = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.976214] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.976376] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] nova_sys_admin.group = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.976535] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] nova_sys_admin.helper_command = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.976706] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.976873] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.977045] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] nova_sys_admin.user = None {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 544.977180] env[69994]: DEBUG oslo_service.backend.eventlet.service [None req-09e6b48b-f62c-4285-8242-2d2cbaa1ba28 None None] ******************************************************************************** {{(pid=69994) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 544.977605] env[69994]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 545.481432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Getting list of instances from cluster (obj){ [ 545.481432] env[69994]: value = "domain-c8" [ 545.481432] env[69994]: _type = "ClusterComputeResource" [ 545.481432] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 545.482534] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ba4588-2f1b-41f1-b8fd-46af765d306a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.492362] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Got total of 0 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 545.492930] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 545.493426] env[69994]: INFO nova.virt.node [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Generated node identity 92ce3c95-4efe-4d04-802b-6b187afc5aa7 [ 545.493662] env[69994]: INFO nova.virt.node [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Wrote node identity 92ce3c95-4efe-4d04-802b-6b187afc5aa7 to /opt/stack/data/n-cpu-1/compute_id [ 545.996545] env[69994]: WARNING nova.compute.manager [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Compute nodes ['92ce3c95-4efe-4d04-802b-6b187afc5aa7'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 547.002086] env[69994]: INFO nova.compute.manager [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 548.008121] env[69994]: WARNING nova.compute.manager [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 548.008441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 548.008611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 548.008762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 548.008914] env[69994]: DEBUG nova.compute.resource_tracker [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 548.009841] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862542da-4936-455e-a7c6-76b5fc96243e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.017988] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e632c08c-ca45-4fd0-8957-1d48aba201eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.032491] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b0e6f1-44d2-405d-bcf7-e1bdf4741f5a {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.040147] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4972c74-e029-4fc5-9d34-57c3e1cd8fd0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.067687] env[69994]: DEBUG nova.compute.resource_tracker [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181001MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 548.067835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 548.068059] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 548.571016] env[69994]: WARNING nova.compute.resource_tracker [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] No compute node record for cpu-1:92ce3c95-4efe-4d04-802b-6b187afc5aa7: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 92ce3c95-4efe-4d04-802b-6b187afc5aa7 could not be found. [ 549.074530] env[69994]: INFO nova.compute.resource_tracker [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 [ 550.582044] env[69994]: DEBUG nova.compute.resource_tracker [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 550.582414] env[69994]: DEBUG nova.compute.resource_tracker [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 550.742157] env[69994]: INFO nova.scheduler.client.report [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] [req-0e74e616-700d-4951-b295-fb6e70580327] Created resource provider record via placement API for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
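[editor's note] The inventory payload logged above and below for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 can be reproduced with a short standalone sketch. This is illustrative only: the dictionary values are copied from the log entries, and the effective_capacity() helper is a hypothetical name used here to show the capacity arithmetic implied by the allocation ratios; it is not a Nova or Placement API.

# Illustrative sketch: rebuild the inventory structure reported by the resource
# tracker and derive the allocatable capacity implied by each allocation ratio.
# Values are taken from the log lines; effective_capacity() is hypothetical.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 120,
                "step_size": 1, "allocation_ratio": 1.0},
}

def effective_capacity(inv: dict) -> float:
    # Capacity available for allocation: (total - reserved) * allocation_ratio.
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for resource_class, inv in inventory.items():
    print(f"{resource_class}: {effective_capacity(inv)} allocatable")
# Expected output: VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0

[end editor's note]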
[ 550.759450] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2f770a-3a2a-4712-8854-9c4d07cb5dc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.767618] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e19ad8-f87c-4e27-968b-1a37213146f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.797630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1f5abc-f8ea-4642-bf89-268df76409d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.804674] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33fcbf5-8589-4159-b3d9-284049f132f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.817775] env[69994]: DEBUG nova.compute.provider_tree [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 551.351417] env[69994]: DEBUG nova.scheduler.client.report [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 551.351645] env[69994]: DEBUG nova.compute.provider_tree [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 0 to 1 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 551.351784] env[69994]: DEBUG nova.compute.provider_tree [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 551.402555] env[69994]: DEBUG nova.compute.provider_tree [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Updating 
resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 1 to 2 during operation: update_traits {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 551.907228] env[69994]: DEBUG nova.compute.resource_tracker [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 551.907557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.839s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 551.907613] env[69994]: DEBUG nova.service [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Creating RPC server for service compute {{(pid=69994) start /opt/stack/nova/nova/service.py:186}} [ 551.920866] env[69994]: DEBUG nova.service [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] Join ServiceGroup membership for this service compute {{(pid=69994) start /opt/stack/nova/nova/service.py:203}} [ 551.921127] env[69994]: DEBUG nova.servicegroup.drivers.db [None req-d7786f87-449c-47ae-815c-704719c959b2 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69994) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 593.712893] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "2e374549-00a2-4014-90e0-ceccbe4360fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.713222] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.946568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "f3945280-ee10-426b-bcab-3e52e8779c55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.948755] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "f3945280-ee10-426b-bcab-3e52e8779c55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.128065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 
tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "91666839-f440-499e-acf0-07d352e701ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.128632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "91666839-f440-499e-acf0-07d352e701ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.216237] env[69994]: DEBUG nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 594.453143] env[69994]: DEBUG nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 594.631396] env[69994]: DEBUG nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 594.759011] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.761086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.763010] env[69994]: INFO nova.compute.claims [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 594.988419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.167125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.658382] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.658772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.707248] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.707440] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.914694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c19dfac-bcb3-4abe-be06-af17377f50a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.923760] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa72e145-220e-4e5f-a1d3-524bc9ae08c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.927362] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 595.967896] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7882d5c9-5b23-4241-afa1-3a219b5cb430 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.979573] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51efec96-83c6-4316-a345-f05164f07157 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.999711] env[69994]: DEBUG nova.compute.provider_tree [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.162389] env[69994]: DEBUG nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 596.210155] env[69994]: DEBUG nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 596.430890] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Getting list of instances from cluster (obj){ [ 596.430890] env[69994]: value = "domain-c8" [ 596.430890] env[69994]: _type = "ClusterComputeResource" [ 596.430890] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 596.433248] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83da5a2-a67c-45a6-949c-0739ac530059 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.443064] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Got total of 0 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 596.443372] env[69994]: WARNING nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] While synchronizing instance power states, found 1 instances in the database and 0 instances on the hypervisor. [ 596.443450] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Triggering sync for uuid 2e374549-00a2-4014-90e0-ceccbe4360fa {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 596.443778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "2e374549-00a2-4014-90e0-ceccbe4360fa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.444101] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 596.444400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Getting list of instances from cluster (obj){ [ 596.444400] env[69994]: value = "domain-c8" [ 596.444400] env[69994]: _type = "ClusterComputeResource" [ 596.444400] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 596.445391] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866ae183-100b-49e0-9187-a52eec1c216e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.454390] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Got total of 0 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 596.503225] env[69994]: DEBUG nova.scheduler.client.report [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 596.702581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.739009] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.010164] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.012782] env[69994]: DEBUG nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 597.013801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.026s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.015824] env[69994]: INFO nova.compute.claims [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.521206] env[69994]: DEBUG nova.compute.utils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 597.522739] env[69994]: DEBUG nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 597.522860] env[69994]: DEBUG nova.network.neutron [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.029657] env[69994]: DEBUG nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 598.152812] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3076b8b9-db3a-47ee-9c5c-b3ae58ee7b80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.170023] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53944fa-cec7-4c29-a6c3-e0b0fba35a40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.209350] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf72b47-b249-4ddb-aa91-6ea85de61465 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.218186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec41abb-89f9-4e57-ad26-682d6f7ba3a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.233918] env[69994]: DEBUG nova.compute.provider_tree [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.436257] env[69994]: DEBUG nova.policy [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d38df417e9f44d39a794ea2ad0ce29d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eea3a16a357a4c818ee5b3b69c9149cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 598.742930] env[69994]: DEBUG nova.scheduler.client.report [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 599.044809] env[69994]: DEBUG nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 599.084786] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 599.084960] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.085185] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 599.085385] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.085551] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 599.085718] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 599.085990] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 
tempest-DeleteServersAdminTestJSON-626869198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 599.086222] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 599.086697] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 599.086908] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 599.087070] env[69994]: DEBUG nova.virt.hardware [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 599.089441] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70301688-38b2-45cf-a743-f90c4a5b360b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.100933] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d2a96e-0bc9-4b06-9186-1a69140c66ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.123581] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9231a4a-801f-4f2f-9f34-41714085b24e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.247917] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.247917] env[69994]: DEBUG nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 599.257022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.086s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.257022] env[69994]: INFO nova.compute.claims [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 599.599142] env[69994]: DEBUG nova.network.neutron [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Successfully created port: de9c0db5-bb4c-466b-ab75-f2d6b988725d {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.760769] env[69994]: DEBUG nova.compute.utils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 599.763932] env[69994]: DEBUG nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 599.978564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "15d17772-ac57-49a3-b261-bf49b902f658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.978843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "15d17772-ac57-49a3-b261-bf49b902f658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.157602] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.157869] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.158064] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.158287] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.158456] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.158669] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.158889] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.159111] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 600.160064] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 600.269069] env[69994]: DEBUG nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 600.406192] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185f7f35-50c5-403b-9a76-9845f15b27a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.414211] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec1818f-0bcb-4637-8214-7cf37b3b6856 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.448317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd2b647-83c2-4356-a60a-b793ab4870e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.456785] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf75e29-e731-4b35-984d-d056324d060c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.471566] env[69994]: DEBUG nova.compute.provider_tree [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.481571] env[69994]: DEBUG nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 600.664283] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.975385] env[69994]: DEBUG nova.scheduler.client.report [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 601.025089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.280612] env[69994]: DEBUG nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 601.317115] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 601.317430] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.317543] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 601.317736] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.317883] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 601.318274] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 601.318898] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 601.318898] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 601.318898] 
env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 601.319050] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 601.319266] env[69994]: DEBUG nova.virt.hardware [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 601.321148] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d28ae63-2e52-4cae-aad0-11d0b909fc65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.330463] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a38c88-c7b8-430c-a4ad-ca713c71f80e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.348887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.359299] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.359903] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f12c6480-629b-44f8-b50d-cc857aa50942 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.374776] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Created folder: OpenStack in parent group-v4. [ 601.374986] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Creating folder: Project (173b319d5f1c41a5af0342b7f94d9d42). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.375623] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1be21b23-1762-461e-8238-e7271e158026 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.383944] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Created folder: Project (173b319d5f1c41a5af0342b7f94d9d42) in parent group-v647729. [ 601.384267] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Creating folder: Instances. Parent ref: group-v647730. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.384405] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6983e409-faaa-4934-a383-773688248c51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.393574] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Created folder: Instances in parent group-v647730. [ 601.393574] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 601.393574] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 601.393681] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a43da7d-a5ac-4fd0-b8d0-7c39ff490f9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.413023] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.413023] env[69994]: value = "task-3241178" [ 601.413023] env[69994]: _type = "Task" [ 601.413023] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.420550] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241178, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.448100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.448100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.486022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.231s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.486022] env[69994]: DEBUG nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 601.488061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.787s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.490155] env[69994]: INFO nova.compute.claims [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.925910] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241178, 'name': CreateVM_Task, 'duration_secs': 0.341529} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.926188] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 601.927257] env[69994]: DEBUG oslo_vmware.service [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f513e6f3-2a16-450a-9e14-95c254a302a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.936066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.936066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.936569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 601.937388] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cf20d8c-0918-4762-805d-f0247c6b79b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.941419] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 601.941419] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527d4007-1e93-ba01-ffde-ee3bb255fcc0" [ 601.941419] env[69994]: _type = "Task" [ 601.941419] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.949846] env[69994]: DEBUG nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 601.952298] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527d4007-1e93-ba01-ffde-ee3bb255fcc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.997539] env[69994]: DEBUG nova.compute.utils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 602.005443] env[69994]: DEBUG nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 602.005638] env[69994]: DEBUG nova.network.neutron [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 602.196691] env[69994]: DEBUG nova.policy [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3d82eb3c4cc4d9690b8a5b3ef7cd3d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f46255739e0f4fb2b40023c296e800cc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 602.455457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.455821] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.456339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.460231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.462906] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.464025] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fea0c5c-0e4f-432b-a92d-4a8bd9bce9c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.484216] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.484216] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.484216] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f8cb09-4386-4e50-85a0-afc5405a8fe1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.491880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.499724] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b6393af-f6d4-4630-b636-0b14b7344180 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.506180] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.506180] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.509010] env[69994]: DEBUG nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 
91666839-f440-499e-acf0-07d352e701ab] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 602.514534] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 602.514534] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a87f02-e7f1-c512-63b7-04a3487d05bb" [ 602.514534] env[69994]: _type = "Task" [ 602.514534] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.525222] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a87f02-e7f1-c512-63b7-04a3487d05bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.681486] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab963c9-ab2b-441a-80a0-155742a2768f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.694111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b7abcc-facb-4ef7-b87c-d3a483e791e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.738190] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add946be-5525-4c51-be40-35819cad2600 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.745708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63c838d-c785-401b-ae09-14f77b3503b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.762705] env[69994]: DEBUG nova.compute.provider_tree [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.011884] env[69994]: DEBUG nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 603.038772] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 603.039554] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Creating directory with path [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.039554] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f7cd5f2-b42b-471d-9c5b-2e1567e10fba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.078242] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Created directory with path [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.078463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Fetch image to [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 603.078639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Downloading image file data cc2e14cc-b12f-480a-a387-dd21e9efda8b to [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk on the data store datastore2 {{(pid=69994) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 603.079488] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d0f279-3bed-46dc-9e7a-702d42726327 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.092130] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa289175-6f8d-4391-9526-67a04a99c850 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.107144] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4f30b2-b1b4-460e-af4e-24c2f04e20e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.142183] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbf1328-33f5-4d20-a453-774ba59a8339 
{{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.151732] env[69994]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a5715721-cc09-462d-b436-2c21655ec869 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.189760] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Downloading image file data cc2e14cc-b12f-480a-a387-dd21e9efda8b to the data store datastore2 {{(pid=69994) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 603.268876] env[69994]: DEBUG nova.scheduler.client.report [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 603.282372] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 603.535180] env[69994]: DEBUG nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 603.574624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.591069] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 603.591069] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.592269] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 603.592507] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.592663] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 603.592810] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 603.593047] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 603.593331] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 603.593479] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 603.593646] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 603.593855] env[69994]: DEBUG nova.virt.hardware [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 603.596907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0c13c7-66e7-4443-9e20-105a0386272e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.617565] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b637dd25-569d-4740-88be-53ea249d5d5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.779930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.779930] env[69994]: DEBUG nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 603.790393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.050s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.791498] env[69994]: INFO nova.compute.claims [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.947999] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 603.948241] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 604.011750] env[69994]: DEBUG nova.network.neutron [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Successfully updated port: de9c0db5-bb4c-466b-ab75-f2d6b988725d {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 604.077183] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Downloaded image file data cc2e14cc-b12f-480a-a387-dd21e9efda8b to vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk on the data store datastore2 {{(pid=69994) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 604.079568] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 604.081043] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Copying Virtual Disk [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk to [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk {{(pid=69994) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.081043] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-770a65fc-6629-43b2-b265-a59807ad1369 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.088791] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 604.088791] env[69994]: value = "task-3241179" [ 604.088791] env[69994]: _type = "Task" [ 604.088791] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.098617] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241179, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.134273] env[69994]: DEBUG nova.network.neutron [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Successfully created port: ce41f31f-0a63-4393-90a3-de51de55789e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.254572] env[69994]: DEBUG nova.compute.manager [req-37c8ac26-ac61-4f13-a8a3-c1500e84649c req-dae86762-e4d5-4dae-a018-388b5cd5d32e service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Received event network-vif-plugged-de9c0db5-bb4c-466b-ab75-f2d6b988725d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 604.254814] env[69994]: DEBUG oslo_concurrency.lockutils [req-37c8ac26-ac61-4f13-a8a3-c1500e84649c req-dae86762-e4d5-4dae-a018-388b5cd5d32e service nova] Acquiring lock "2e374549-00a2-4014-90e0-ceccbe4360fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.255090] env[69994]: DEBUG oslo_concurrency.lockutils [req-37c8ac26-ac61-4f13-a8a3-c1500e84649c req-dae86762-e4d5-4dae-a018-388b5cd5d32e service nova] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.255208] env[69994]: DEBUG oslo_concurrency.lockutils [req-37c8ac26-ac61-4f13-a8a3-c1500e84649c req-dae86762-e4d5-4dae-a018-388b5cd5d32e service nova] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.255420] env[69994]: DEBUG nova.compute.manager [req-37c8ac26-ac61-4f13-a8a3-c1500e84649c req-dae86762-e4d5-4dae-a018-388b5cd5d32e service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] No waiting events found dispatching network-vif-plugged-de9c0db5-bb4c-466b-ab75-f2d6b988725d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 604.255734] env[69994]: WARNING 
nova.compute.manager [req-37c8ac26-ac61-4f13-a8a3-c1500e84649c req-dae86762-e4d5-4dae-a018-388b5cd5d32e service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Received unexpected event network-vif-plugged-de9c0db5-bb4c-466b-ab75-f2d6b988725d for instance with vm_state building and task_state spawning. [ 604.296980] env[69994]: DEBUG nova.compute.utils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 604.300967] env[69994]: DEBUG nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 604.301164] env[69994]: DEBUG nova.network.neutron [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 604.505361] env[69994]: DEBUG nova.policy [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6552e09b9e4afa96aa8d1960046951', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbd311dd96cb4ab5b0b75bf914842ce8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 604.514848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "refresh_cache-2e374549-00a2-4014-90e0-ceccbe4360fa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.515057] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired lock "refresh_cache-2e374549-00a2-4014-90e0-ceccbe4360fa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.515284] env[69994]: DEBUG nova.network.neutron [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.604404] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241179, 'name': CopyVirtualDisk_Task} 
progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.801474] env[69994]: DEBUG nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 604.975027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec24818-cbf5-4730-8d36-661d97edfca6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.983100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840d336a-e9ec-483d-8553-564a4e19f70f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.037446] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eca20f-77d9-411d-985d-3f58b65a6947 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.049682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81d6756-0db6-4e73-8f64-5d457caeba20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.065314] env[69994]: DEBUG nova.compute.provider_tree [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.105024] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241179, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68747} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.105367] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Copied Virtual Disk [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk to [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.105852] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Deleting the datastore file [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 605.105852] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf665511-f293-4c68-974e-ab19c9320e81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.115766] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 605.115766] env[69994]: value = "task-3241180" [ 605.115766] env[69994]: _type = "Task" [ 605.115766] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.131793] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241180, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.242520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "84efe900-1d79-42f9-b3c6-54299757cdbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.243024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "84efe900-1d79-42f9-b3c6-54299757cdbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.338113] env[69994]: DEBUG nova.network.neutron [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.572193] env[69994]: DEBUG nova.scheduler.client.report [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.631787] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033499} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.632839] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 605.632839] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Moving file from [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589/cc2e14cc-b12f-480a-a387-dd21e9efda8b to [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b. {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 605.632839] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f3751df0-c551-4943-bff9-fe930c6d1daa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.642169] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 605.642169] env[69994]: value = "task-3241181" [ 605.642169] env[69994]: _type = "Task" [ 605.642169] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.650558] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241181, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.747700] env[69994]: DEBUG nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 605.827434] env[69994]: DEBUG nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 605.855294] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 605.855511] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.855668] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 605.855856] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.856121] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 605.856193] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 605.856838] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 605.856838] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 605.856838] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 605.856838] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 605.857032] env[69994]: DEBUG nova.virt.hardware [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 605.857923] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71665ae8-6fc8-41d5-be2b-9f3c2a515557 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.866896] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb89b04-46db-4a87-8d63-9681ddb2c621 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.896668] env[69994]: DEBUG nova.network.neutron [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Updating instance_info_cache with network_info: [{"id": "de9c0db5-bb4c-466b-ab75-f2d6b988725d", "address": "fa:16:3e:54:84:a1", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde9c0db5-bb", "ovs_interfaceid": "de9c0db5-bb4c-466b-ab75-f2d6b988725d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.082211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.083344] env[69994]: DEBUG nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 606.093589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.429s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.097021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.097021] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 606.097021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.069s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.097209] env[69994]: INFO nova.compute.claims [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 606.101150] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192dd10a-caeb-4760-9082-9cc58b0d4ae7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.114711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56cd7fa-2f21-4d58-9893-8f29424c8ad3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.123163] env[69994]: DEBUG nova.network.neutron [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Successfully created port: b1e9d712-4ee9-4431-b7ed-f8221fb33b62 {{(pid=69994) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.156232] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4120e6-d205-487c-96f7-ee91ad3317b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.165826] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241181, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025594} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.168017] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] File moved {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 606.168252] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Cleaning up location [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 606.168427] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Deleting the datastore file [datastore2] vmware_temp/b5ba74b7-e221-4222-93e6-b7190bc54589 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.168713] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e6a8f3b-4f62-426a-b9c6-478594821eeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.177272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a42069-c47c-4b63-924e-d957d14a1704 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.182893] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 606.182893] env[69994]: value = "task-3241182" [ 606.182893] env[69994]: _type = "Task" [ 606.182893] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.218094] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181049MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 606.218317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.224037] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02982} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.224299] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 606.225103] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af5a59bf-3793-4015-945a-068ba269522f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.230570] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 606.230570] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52498e4d-d1c6-926d-dc31-1e614460f8e0" [ 606.230570] env[69994]: _type = "Task" [ 606.230570] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.239731] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52498e4d-d1c6-926d-dc31-1e614460f8e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.277570] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.400474] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Releasing lock "refresh_cache-2e374549-00a2-4014-90e0-ceccbe4360fa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.400474] env[69994]: DEBUG nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Instance network_info: |[{"id": "de9c0db5-bb4c-466b-ab75-f2d6b988725d", "address": "fa:16:3e:54:84:a1", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde9c0db5-bb", "ovs_interfaceid": "de9c0db5-bb4c-466b-ab75-f2d6b988725d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 606.400738] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:84:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de9c0db5-bb4c-466b-ab75-f2d6b988725d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.414445] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Creating folder: Project (eea3a16a357a4c818ee5b3b69c9149cb). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.415043] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72da819f-40a6-481d-83a6-a4d2a312993f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.429810] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Created folder: Project (eea3a16a357a4c818ee5b3b69c9149cb) in parent group-v647729. [ 606.430018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Creating folder: Instances. Parent ref: group-v647733. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.430358] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0fd183e-850a-4c40-8f43-cf81a781f6f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.446233] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Created folder: Instances in parent group-v647733. [ 606.446233] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 606.446233] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.446233] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d416841-c99b-4df0-945a-0bfd6ee63fc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.471515] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.471515] env[69994]: value = "task-3241185" [ 606.471515] env[69994]: _type = "Task" [ 606.471515] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.479560] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241185, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.605719] env[69994]: DEBUG nova.compute.utils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 606.607410] env[69994]: DEBUG nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 606.607601] env[69994]: DEBUG nova.network.neutron [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 606.753602] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52498e4d-d1c6-926d-dc31-1e614460f8e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009098} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.754265] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.754265] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] f3945280-ee10-426b-bcab-3e52e8779c55/f3945280-ee10-426b-bcab-3e52e8779c55.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 606.754265] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4920f107-a905-4994-a26d-682969287a8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.761966] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 606.761966] env[69994]: value = "task-3241186" [ 606.761966] env[69994]: _type = "Task" [ 606.761966] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.775860] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.984497] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241185, 'name': CreateVM_Task, 'duration_secs': 0.45711} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.985046] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 607.014734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.015082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.015508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 607.015998] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2ed2e17-d4ed-4009-ba7a-9c526b3f8112 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.023900] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 607.023900] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d83f22-1d9d-7553-2bd2-f159b1937a49" [ 607.023900] env[69994]: _type = "Task" [ 607.023900] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.036017] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d83f22-1d9d-7553-2bd2-f159b1937a49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.116576] env[69994]: DEBUG nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 607.154911] env[69994]: DEBUG nova.policy [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29861e0318bb4e5fa5d92379b063367c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1aa7929b2e0d467c99c25acd8b7e92d6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 607.277739] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241186, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.419331] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d022a4-2e0c-4295-b45c-83c50ed4776d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.428019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0afecd8-2c66-44af-8918-86ca82c7a16b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.464734] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7cccd7-a849-4737-b897-5337f10334cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.472305] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9810d8b7-4826-4f84-9aea-f9c269b5bf19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.486471] env[69994]: DEBUG nova.compute.provider_tree [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.538327] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d83f22-1d9d-7553-2bd2-f159b1937a49, 'name': SearchDatastore_Task, 'duration_secs': 0.026561} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.538601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.538909] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 607.539641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.539641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.539641] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 607.539799] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1373ab32-deb3-408b-a713-dd32f654ef1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.548627] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 607.548627] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 607.549099] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac66288e-6c26-4012-a038-c24db2475e63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.556582] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 607.556582] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5290ffa4-822d-f08b-0706-58f055b01d12" [ 607.556582] env[69994]: _type = "Task" [ 607.556582] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.568758] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5290ffa4-822d-f08b-0706-58f055b01d12, 'name': SearchDatastore_Task, 'duration_secs': 0.008385} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.570696] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fbf7a54-4185-4cb5-867a-da73cdfc7303 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.575967] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 607.575967] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52006d39-cd6b-6e2b-5f38-0bdba7812cf7" [ 607.575967] env[69994]: _type = "Task" [ 607.575967] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.584860] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52006d39-cd6b-6e2b-5f38-0bdba7812cf7, 'name': SearchDatastore_Task, 'duration_secs': 0.007432} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.586611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.586611] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 2e374549-00a2-4014-90e0-ceccbe4360fa/2e374549-00a2-4014-90e0-ceccbe4360fa.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 607.586611] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5837a05-1963-465c-90c2-a5ffe71d2ee8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.591846] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 607.591846] env[69994]: value = "task-3241187" [ 607.591846] env[69994]: _type = "Task" [ 607.591846] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.599692] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.778197] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689208} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.778616] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] f3945280-ee10-426b-bcab-3e52e8779c55/f3945280-ee10-426b-bcab-3e52e8779c55.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 607.778772] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 607.779070] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87f38d89-e4c8-4169-8eb7-a9b5a0869fd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.786538] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 607.786538] env[69994]: value = "task-3241188" [ 607.786538] env[69994]: _type = "Task" [ 607.786538] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.800825] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.989882] env[69994]: DEBUG nova.scheduler.client.report [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 608.104364] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241187, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.134471] env[69994]: DEBUG nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 608.171825] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 608.172135] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 608.172302] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 608.172524] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 608.172666] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 608.172822] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 608.173100] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 608.173283] env[69994]: DEBUG 
nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 608.173526] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 608.173625] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 608.173846] env[69994]: DEBUG nova.virt.hardware [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 608.174977] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0180da07-6bae-4a5f-9a37-2d998ea755d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.185524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ce0faa-e72f-46b4-84ca-bc9ce5284bc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.211708] env[69994]: DEBUG nova.network.neutron [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Successfully updated port: ce41f31f-0a63-4393-90a3-de51de55789e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 608.300186] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180391} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.300186] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 608.300290] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af533f4-3b21-44af-b5c9-dd5d57650c2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.322511] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] f3945280-ee10-426b-bcab-3e52e8779c55/f3945280-ee10-426b-bcab-3e52e8779c55.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 608.324432] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1934fce9-ec70-42a9-936f-c14dbdffa18a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.343069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.343302] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.351588] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 608.351588] env[69994]: value = "task-3241189" [ 608.351588] env[69994]: _type = "Task" [ 608.351588] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.361401] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241189, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.461368] env[69994]: DEBUG nova.compute.manager [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Received event network-changed-de9c0db5-bb4c-466b-ab75-f2d6b988725d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 608.461368] env[69994]: DEBUG nova.compute.manager [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Refreshing instance network info cache due to event network-changed-de9c0db5-bb4c-466b-ab75-f2d6b988725d. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 608.463534] env[69994]: DEBUG oslo_concurrency.lockutils [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] Acquiring lock "refresh_cache-2e374549-00a2-4014-90e0-ceccbe4360fa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.463534] env[69994]: DEBUG oslo_concurrency.lockutils [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] Acquired lock "refresh_cache-2e374549-00a2-4014-90e0-ceccbe4360fa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.463534] env[69994]: DEBUG nova.network.neutron [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Refreshing network info cache for port de9c0db5-bb4c-466b-ab75-f2d6b988725d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 608.497246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.497981] env[69994]: DEBUG nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 608.502584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.011s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.503986] env[69994]: INFO nova.compute.claims [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.608431] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241187, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.715215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "refresh_cache-91666839-f440-499e-acf0-07d352e701ab" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.715215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquired lock "refresh_cache-91666839-f440-499e-acf0-07d352e701ab" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.715291] env[69994]: DEBUG nova.network.neutron [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.850694] env[69994]: DEBUG nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 608.864155] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241189, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.008822] env[69994]: DEBUG nova.compute.utils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 609.014625] env[69994]: DEBUG nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 609.018116] env[69994]: DEBUG nova.network.neutron [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 609.108524] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241187, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.459631} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.108524] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 2e374549-00a2-4014-90e0-ceccbe4360fa/2e374549-00a2-4014-90e0-ceccbe4360fa.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 609.108524] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.108524] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b912e064-e375-4e8d-8d49-faf222372ac1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.115572] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 609.115572] env[69994]: value = "task-3241190" [ 609.115572] env[69994]: _type = "Task" [ 609.115572] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.124136] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241190, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.264558] env[69994]: DEBUG nova.policy [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a676823f7a74624bcc84b94fa3cf0e8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5543f4937e604cc189cc63c178705112', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 609.336308] env[69994]: DEBUG nova.network.neutron [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Successfully created port: 1c4ae184-b8b0-409f-aff4-5568af2af1b9 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.366473] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241189, 'name': ReconfigVM_Task, 'duration_secs': 0.977757} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.366763] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Reconfigured VM instance instance-00000002 to attach disk [datastore2] f3945280-ee10-426b-bcab-3e52e8779c55/f3945280-ee10-426b-bcab-3e52e8779c55.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 609.367453] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2142d96e-6439-423b-8f3a-bd84abc804c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.373848] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 609.373848] env[69994]: value = "task-3241191" [ 609.373848] env[69994]: _type = "Task" [ 609.373848] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.379430] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.385382] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241191, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.441724] env[69994]: DEBUG nova.network.neutron [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.517751] env[69994]: DEBUG nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 609.626247] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241190, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065842} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.630585] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 609.633060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91a844b-792a-40e6-8a70-ed25e9094982 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.664200] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 2e374549-00a2-4014-90e0-ceccbe4360fa/2e374549-00a2-4014-90e0-ceccbe4360fa.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 609.665781] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cb4065f-7cbd-4d13-af3b-fa21264b2b8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.686618] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 609.686618] env[69994]: value = "task-3241192" [ 609.686618] env[69994]: _type = "Task" [ 609.686618] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.699760] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241192, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.746336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967b9d6f-d2b2-409a-9b7e-0df87b562ea2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.755602] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532d470d-f49b-4fd3-ba1d-7d43d9f3b955 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.792323] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce85da5-d86d-407d-bdad-aa13282ce8d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.800451] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4655056a-a9b6-4437-b9cd-dddb8bd9fa78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.814451] env[69994]: DEBUG nova.compute.provider_tree [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.884895] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241191, 'name': Rename_Task, 'duration_secs': 0.129176} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.885267] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 609.885512] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b21f77b-656a-4967-839f-5973266b6b51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.892410] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 609.892410] env[69994]: value = "task-3241193" [ 609.892410] env[69994]: _type = "Task" [ 609.892410] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.901864] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241193, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.199814] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241192, 'name': ReconfigVM_Task, 'duration_secs': 0.269317} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.200101] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 2e374549-00a2-4014-90e0-ceccbe4360fa/2e374549-00a2-4014-90e0-ceccbe4360fa.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 610.201221] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-550ed8df-b8ee-40b1-aaf9-216a641b44b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.209036] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 610.209036] env[69994]: value = "task-3241194" [ 610.209036] env[69994]: _type = "Task" [ 610.209036] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.219193] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241194, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.291304] env[69994]: DEBUG nova.network.neutron [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Updating instance_info_cache with network_info: [{"id": "ce41f31f-0a63-4393-90a3-de51de55789e", "address": "fa:16:3e:c2:ac:fe", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce41f31f-0a", "ovs_interfaceid": "ce41f31f-0a63-4393-90a3-de51de55789e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.317825] env[69994]: DEBUG nova.scheduler.client.report [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 610.402035] env[69994]: DEBUG oslo_vmware.api [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241193, 'name': PowerOnVM_Task, 'duration_secs': 0.42354} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.403447] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 610.404193] env[69994]: INFO nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Took 9.12 seconds to spawn the instance on the hypervisor. 
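The span above shows a complete spawn for instance f3945280-ee10-426b-bcab-3e52e8779c55: CopyVirtualDisk_Task, ExtendVirtualDisk_Task, a ReconfigVM_Task that attaches the copied VMDK, Rename_Task, and finally PowerOnVM_Task, each driven by the same "Waiting for the task ... / progress is N% / completed successfully" polling loop. A minimal sketch of that polling pattern follows, assuming only oslo.vmware's public VMwareAPISession API; the vCenter host, credentials, and datastore paths are placeholders, not values taken from this log.

    # Minimal sketch of the oslo.vmware task-polling pattern seen in the log above.
    # Host, credentials and datastore paths are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.test',          # placeholder vCenter host
        'user@vsphere.local',            # placeholder username
        'secret',                        # placeholder password
        10,                              # api_retry_count
        0.5)                             # task_poll_interval in seconds

    # Start a long-running operation; vCenter returns a Task reference immediately.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] devstack-image-cache_base/<image-id>.vmdk',
        destName='[datastore2] <instance-uuid>/<instance-uuid>.vmdk')

    # wait_for_task() polls the task every task_poll_interval seconds, emitting the
    # "_poll_task ... progress is N%" DEBUG lines, and raises if the task fails.
    task_info = session.wait_for_task(task)
    assert task_info.state == 'success'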
[ 610.404193] env[69994]: DEBUG nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 610.405011] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b686244e-c724-4931-84bb-4ed8fd738343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.485141] env[69994]: DEBUG nova.network.neutron [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Updated VIF entry in instance network info cache for port de9c0db5-bb4c-466b-ab75-f2d6b988725d. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.485498] env[69994]: DEBUG nova.network.neutron [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Updating instance_info_cache with network_info: [{"id": "de9c0db5-bb4c-466b-ab75-f2d6b988725d", "address": "fa:16:3e:54:84:a1", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde9c0db5-bb", "ovs_interfaceid": "de9c0db5-bb4c-466b-ab75-f2d6b988725d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.539680] env[69994]: DEBUG nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 610.581053] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 610.584171] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.584171] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.584171] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.584171] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.584171] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 610.584611] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 610.584611] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 610.584611] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 610.584611] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 610.584611] env[69994]: DEBUG nova.virt.hardware [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 610.584795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "2f710439-0216-401e-9759-af584f9bd00d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.584795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "2f710439-0216-401e-9759-af584f9bd00d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.584795] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b85a02-8627-4c2b-a279-06379a9c1d0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.598776] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67958e36-c101-494a-87f6-099cb58dc8ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.725057] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241194, 'name': Rename_Task, 'duration_secs': 0.214173} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.725921] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 610.725921] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07a72f81-a813-4a0e-b8d9-439b6d9a14e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.734802] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 610.734802] env[69994]: value = "task-3241195" [ 610.734802] env[69994]: _type = "Task" [ 610.734802] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.744076] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241195, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.784667] env[69994]: DEBUG nova.network.neutron [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Successfully updated port: b1e9d712-4ee9-4431-b7ed-f8221fb33b62 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 610.795355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Releasing lock "refresh_cache-91666839-f440-499e-acf0-07d352e701ab" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.795763] env[69994]: DEBUG nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Instance network_info: |[{"id": "ce41f31f-0a63-4393-90a3-de51de55789e", "address": "fa:16:3e:c2:ac:fe", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce41f31f-0a", "ovs_interfaceid": 
"ce41f31f-0a63-4393-90a3-de51de55789e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 610.797173] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:ac:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce41f31f-0a63-4393-90a3-de51de55789e', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.809864] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Creating folder: Project (f46255739e0f4fb2b40023c296e800cc). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.811724] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95f79ccd-d867-4509-91fe-d1f878142376 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.822952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.320s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.824337] env[69994]: DEBUG nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 610.828202] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Created folder: Project (f46255739e0f4fb2b40023c296e800cc) in parent group-v647729. [ 610.828433] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Creating folder: Instances. Parent ref: group-v647736. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.829286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.255s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.830792] env[69994]: INFO nova.compute.claims [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.833723] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd34eeb4-629b-4337-8743-6d98f14ff6eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.843685] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Created folder: Instances in parent group-v647736. [ 610.844066] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 610.844298] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91666839-f440-499e-acf0-07d352e701ab] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 610.844529] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad3afe6d-4add-47d6-be8c-5db7388e3b65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.866579] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.866579] env[69994]: value = "task-3241198" [ 610.866579] env[69994]: _type = "Task" [ 610.866579] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.878857] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241198, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.938402] env[69994]: INFO nova.compute.manager [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Took 15.97 seconds to build instance. 
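Note: the CreateVM_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern visible in the log paths: the vCenter call returns a Task managed object, and the caller blocks in wait_for_task (oslo_vmware/api.py:397), which repeatedly polls the task via _poll_task (api.py:434/444) and logs "progress is N%" until the task reaches a terminal state. The sketch below is only an illustration of that poll loop under assumed names; fetch_task_info is a hypothetical stand-in for the real PropertyCollector read, and the poll interval is arbitrary.

    import time

    # Hypothetical stand-in for reading the Task managed object; a real
    # implementation would query the vSphere API for state and progress.
    def fetch_task_info(task_state):
        task_state["progress"] = min(100, task_state["progress"] + 33)
        state = "success" if task_state["progress"] >= 100 else "running"
        return state, task_state["progress"], None

    def wait_for_task(task_state, poll_interval=0.5):
        # Poll until the task finishes, logging progress the way the
        # "progress is N%" entries above do.
        while True:
            state, progress, error = fetch_task_info(task_state)
            print("Task progress is %d%%" % progress)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(error)
            time.sleep(poll_interval)

    wait_for_task({"progress": 0})

The real loop differs mainly in that it reads the task through the established VMwareAPISession and re-raises vCenter faults instead of a plain RuntimeError.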
[ 610.989421] env[69994]: DEBUG oslo_concurrency.lockutils [req-4401416b-ed1b-474a-a649-6755ce311194 req-4feaa949-a73d-430d-b646-91825dc95b60 service nova] Releasing lock "refresh_cache-2e374549-00a2-4014-90e0-ceccbe4360fa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.106149] env[69994]: DEBUG nova.network.neutron [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Successfully created port: 239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.247170] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241195, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.288256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "refresh_cache-2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.288256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired lock "refresh_cache-2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.289606] env[69994]: DEBUG nova.network.neutron [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.332823] env[69994]: DEBUG nova.compute.utils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 611.332823] env[69994]: DEBUG nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 611.368155] env[69994]: DEBUG nova.compute.manager [req-880b1964-8b01-4bb0-85df-1e024a29a8d5 req-b15bfa88-7499-40d4-ac26-15fd2ad4f026 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Received event network-vif-plugged-ce41f31f-0a63-4393-90a3-de51de55789e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.368155] env[69994]: DEBUG oslo_concurrency.lockutils [req-880b1964-8b01-4bb0-85df-1e024a29a8d5 req-b15bfa88-7499-40d4-ac26-15fd2ad4f026 service nova] Acquiring lock "91666839-f440-499e-acf0-07d352e701ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.368155] env[69994]: DEBUG oslo_concurrency.lockutils [req-880b1964-8b01-4bb0-85df-1e024a29a8d5 req-b15bfa88-7499-40d4-ac26-15fd2ad4f026 service nova] Lock "91666839-f440-499e-acf0-07d352e701ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.368329] env[69994]: DEBUG oslo_concurrency.lockutils [req-880b1964-8b01-4bb0-85df-1e024a29a8d5 req-b15bfa88-7499-40d4-ac26-15fd2ad4f026 service nova] Lock "91666839-f440-499e-acf0-07d352e701ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.368329] env[69994]: DEBUG nova.compute.manager [req-880b1964-8b01-4bb0-85df-1e024a29a8d5 req-b15bfa88-7499-40d4-ac26-15fd2ad4f026 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] No waiting events found dispatching network-vif-plugged-ce41f31f-0a63-4393-90a3-de51de55789e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 611.368480] env[69994]: WARNING nova.compute.manager [req-880b1964-8b01-4bb0-85df-1e024a29a8d5 req-b15bfa88-7499-40d4-ac26-15fd2ad4f026 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Received unexpected event network-vif-plugged-ce41f31f-0a63-4393-90a3-de51de55789e for instance with vm_state building and task_state spawning. [ 611.379106] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241198, 'name': CreateVM_Task, 'duration_secs': 0.343236} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.379106] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91666839-f440-499e-acf0-07d352e701ab] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 611.379726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.379912] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.380290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 611.380591] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e7de32-c72a-4dd9-aa8c-675c1c3e16db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.385742] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 611.385742] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52389e38-72c3-2df3-5bc5-69558ce406f4" [ 611.385742] env[69994]: _type = "Task" [ 611.385742] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.394984] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52389e38-72c3-2df3-5bc5-69558ce406f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.437749] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a50c2bb-84e3-4473-bf80-f4b33f93a5e9 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "f3945280-ee10-426b-bcab-3e52e8779c55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.491s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.747722] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241195, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.833999] env[69994]: DEBUG nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 611.899454] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52389e38-72c3-2df3-5bc5-69558ce406f4, 'name': SearchDatastore_Task, 'duration_secs': 0.014258} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.899595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.899828] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.900446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.900580] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.900792] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 611.901377] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87c30c1d-083d-46a4-9a9a-350ee33df819 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.911286] env[69994]: DEBUG nova.network.neutron [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.918196] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 611.918196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 611.918889] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1f57edb-2a37-4f62-803b-47a008633bd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.924805] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 611.924805] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5211086d-2579-d5f1-1c3d-e676e3c89bb0" [ 611.924805] env[69994]: _type = "Task" [ 611.924805] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.933654] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5211086d-2579-d5f1-1c3d-e676e3c89bb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.940227] env[69994]: DEBUG nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 612.159825] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.159825] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.189607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b417c686-2801-4f21-9110-fba53dbbce75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.199831] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265fd83a-d8ca-44a4-845a-d9109a6a99e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.232309] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0f2c1b-af9d-48cc-8f0f-7697ce0d1d2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.242226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aee16be-a6f4-4173-9735-8657e0397b65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.250210] env[69994]: DEBUG oslo_vmware.api [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241195, 'name': PowerOnVM_Task, 'duration_secs': 1.453959} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.250787] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 612.251014] env[69994]: INFO nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Took 13.21 seconds to spawn the instance on the hypervisor. 
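Note: the recurring "Acquiring lock ... / acquired ... waited Ns / ... released ... held Ns" triples above (for example the per-host "compute_resources" lock around ResourceTracker.instance_claim) are emitted by oslo.concurrency's lockutils wrapper, not by Nova itself. A rough equivalent using the public lockutils API is sketched below; the lock name and the timing printout are illustrative only, and the instance UUID is just the one from the log.

    import time
    from oslo_concurrency import lockutils

    # Serialize claims the way the resource tracker serializes on its
    # "compute_resources" lock; the decorator emits the waited/held
    # debug lines seen above when DEBUG logging is enabled.
    @lockutils.synchronized("compute_resources")
    def instance_claim(instance_uuid):
        # Work done while the lock is held (placement claim, etc.).
        time.sleep(0.1)
        return instance_uuid

    start = time.monotonic()
    instance_claim("2e374549-00a2-4014-90e0-ceccbe4360fa")
    print("claim finished after %.3fs" % (time.monotonic() - start))

The long "waited 7.255s" values in the log simply mean another request held the same named lock for that long before this one could enter the decorated section.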
[ 612.251226] env[69994]: DEBUG nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.251920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a421038-3f2b-46ed-a8ca-a469a8ea9781 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.263087] env[69994]: DEBUG nova.compute.provider_tree [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.440773] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5211086d-2579-d5f1-1c3d-e676e3c89bb0, 'name': SearchDatastore_Task, 'duration_secs': 0.009472} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.441593] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5e41e0f-fc8f-47cd-95e2-51c8f1f6fa96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.459555] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 612.459555] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523d4994-01f7-75dd-2a9a-e72db259217e" [ 612.459555] env[69994]: _type = "Task" [ 612.459555] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.479478] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523d4994-01f7-75dd-2a9a-e72db259217e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.482086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.730667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "21f66039-6292-4d9c-b97d-668d029def24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.730901] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "21f66039-6292-4d9c-b97d-668d029def24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.770600] env[69994]: DEBUG nova.scheduler.client.report [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 612.788807] env[69994]: INFO nova.compute.manager [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Took 18.07 seconds to build instance. [ 612.843418] env[69994]: DEBUG nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 612.851689] env[69994]: DEBUG nova.network.neutron [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Updating instance_info_cache with network_info: [{"id": "b1e9d712-4ee9-4431-b7ed-f8221fb33b62", "address": "fa:16:3e:3c:eb:59", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1e9d712-4e", "ovs_interfaceid": "b1e9d712-4ee9-4431-b7ed-f8221fb33b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.872815] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 612.873226] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.873415] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 612.873605] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 612.873750] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 612.873896] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 612.874117] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 612.874277] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 612.874440] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 612.875232] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 612.875532] env[69994]: DEBUG nova.virt.hardware [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 612.876682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef3a0a8-ebed-4b53-8295-49601d2307b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.884520] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985cb6e8-27f4-46fd-8392-02f437d7c8ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.899016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 612.904614] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Creating folder: Project 
(a9efee78ee11427399b6d40850393eb9). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.904932] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a75e045d-584d-44db-80c0-65a57451ec64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.915043] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Created folder: Project (a9efee78ee11427399b6d40850393eb9) in parent group-v647729. [ 612.915308] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Creating folder: Instances. Parent ref: group-v647739. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.915568] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a686f154-644b-4bbe-a752-cc0b70ce5366 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.925733] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Created folder: Instances in parent group-v647739. [ 612.925733] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 612.925733] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 612.925733] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39420586-29af-472d-9764-d3ba63cce31f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.945083] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 612.945083] env[69994]: value = "task-3241201" [ 612.945083] env[69994]: _type = "Task" [ 612.945083] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.953455] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241201, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.972158] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523d4994-01f7-75dd-2a9a-e72db259217e, 'name': SearchDatastore_Task, 'duration_secs': 0.020586} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.972425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.972679] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 91666839-f440-499e-acf0-07d352e701ab/91666839-f440-499e-acf0-07d352e701ab.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 612.972965] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a35033c5-f8bc-4ca6-8f8e-f75334f51024 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.980258] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 612.980258] env[69994]: value = "task-3241202" [ 612.980258] env[69994]: _type = "Task" [ 612.980258] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.989512] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241202, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.997241] env[69994]: DEBUG nova.compute.manager [req-d6f47012-008f-424d-9804-c04db7bc5a66 req-6f7d7bfb-ae44-49d1-954a-e856851c2643 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Received event network-vif-plugged-b1e9d712-4ee9-4431-b7ed-f8221fb33b62 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 612.997368] env[69994]: DEBUG oslo_concurrency.lockutils [req-d6f47012-008f-424d-9804-c04db7bc5a66 req-6f7d7bfb-ae44-49d1-954a-e856851c2643 service nova] Acquiring lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.997575] env[69994]: DEBUG oslo_concurrency.lockutils [req-d6f47012-008f-424d-9804-c04db7bc5a66 req-6f7d7bfb-ae44-49d1-954a-e856851c2643 service nova] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.997751] env[69994]: DEBUG oslo_concurrency.lockutils [req-d6f47012-008f-424d-9804-c04db7bc5a66 req-6f7d7bfb-ae44-49d1-954a-e856851c2643 service nova] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.997924] env[69994]: DEBUG nova.compute.manager [req-d6f47012-008f-424d-9804-c04db7bc5a66 req-6f7d7bfb-ae44-49d1-954a-e856851c2643 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] No waiting events found dispatching network-vif-plugged-b1e9d712-4ee9-4431-b7ed-f8221fb33b62 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 612.998108] env[69994]: WARNING nova.compute.manager [req-d6f47012-008f-424d-9804-c04db7bc5a66 req-6f7d7bfb-ae44-49d1-954a-e856851c2643 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Received unexpected event network-vif-plugged-b1e9d712-4ee9-4431-b7ed-f8221fb33b62 for instance with vm_state building and task_state spawning. [ 613.275550] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.276150] env[69994]: DEBUG nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 613.281713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.063s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.290548] env[69994]: DEBUG oslo_concurrency.lockutils [None req-576e64a0-b189-49fc-8fc1-aa1d03f6387d tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.577s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.293027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.849s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.294854] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbd1dab-1125-442c-8ac7-526cc0a997c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.357122] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Releasing lock "refresh_cache-2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.357622] env[69994]: DEBUG nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Instance network_info: |[{"id": "b1e9d712-4ee9-4431-b7ed-f8221fb33b62", "address": "fa:16:3e:3c:eb:59", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1e9d712-4e", "ovs_interfaceid": "b1e9d712-4ee9-4431-b7ed-f8221fb33b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 613.358218] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:eb:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1e9d712-4ee9-4431-b7ed-f8221fb33b62', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 613.367951] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Creating folder: Project (fbd311dd96cb4ab5b0b75bf914842ce8). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.369074] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-069cc43f-1fb7-4170-bb2a-0f2687bbf4f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.390075] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Created folder: Project (fbd311dd96cb4ab5b0b75bf914842ce8) in parent group-v647729. [ 613.390624] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Creating folder: Instances. Parent ref: group-v647742. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.390624] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47545fa5-8274-4c32-b325-567e57837f20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.402904] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Created folder: Instances in parent group-v647742. [ 613.403564] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 613.403564] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 613.403564] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15066558-ce85-4076-bd22-12a5cd9d5ab3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.433216] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 613.433216] env[69994]: value = "task-3241205" [ 613.433216] env[69994]: _type = "Task" [ 613.433216] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.444208] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241205, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.454511] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241201, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.493673] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241202, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.785569] env[69994]: DEBUG nova.compute.utils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 613.787283] env[69994]: DEBUG nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 613.787445] env[69994]: DEBUG nova.network.neutron [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 613.793932] env[69994]: DEBUG nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 613.813515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.520s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.945484] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241205, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.963963] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241201, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.993527] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241202, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538638} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.996020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 91666839-f440-499e-acf0-07d352e701ab/91666839-f440-499e-acf0-07d352e701ab.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 613.996020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 613.996020] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49f106d6-da90-4746-bdd8-ac007975ffaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.002025] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 614.002025] env[69994]: value = "task-3241206" [ 614.002025] env[69994]: _type = "Task" [ 614.002025] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.010603] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241206, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.118630] env[69994]: DEBUG nova.network.neutron [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Successfully updated port: 1c4ae184-b8b0-409f-aff4-5568af2af1b9 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.248675] env[69994]: DEBUG nova.policy [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c779cd89bd7249be9a971c1b98cc5d25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd94527f3e4404496ba30fa03ab0f3888', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 614.295213] env[69994]: DEBUG nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 614.325693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.330535] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 2e374549-00a2-4014-90e0-ceccbe4360fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.330710] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance f3945280-ee10-426b-bcab-3e52e8779c55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.330839] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 91666839-f440-499e-acf0-07d352e701ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.330960] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.331089] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance dbad6bed-64ba-4dfd-abad-c0b2c775ba2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.331206] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 15d17772-ac57-49a3-b261-bf49b902f658 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.331319] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e9bc15f9-e957-487f-b8d5-d1332b185dcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.331431] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.446032] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241205, 'name': CreateVM_Task, 'duration_secs': 0.809933} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.446232] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 614.453759] env[69994]: DEBUG oslo_vmware.service [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9560470b-46d8-4299-9ca3-580af4074959 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.470217] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241201, 'name': CreateVM_Task, 'duration_secs': 1.3041} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.472252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.472252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.472551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 614.472758] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 614.472952] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a27453b-5c3c-43ce-9624-8bda2e0243a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.475771] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 
tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.479644] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 614.479644] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f2fc58-ef4b-4795-c08c-e16a5a05b039" [ 614.479644] env[69994]: _type = "Task" [ 614.479644] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.489628] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f2fc58-ef4b-4795-c08c-e16a5a05b039, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.515610] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241206, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063274} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.515850] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 614.517013] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc515f41-ce57-4149-b05e-563d6a09aee7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.547495] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 91666839-f440-499e-acf0-07d352e701ab/91666839-f440-499e-acf0-07d352e701ab.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 614.548917] env[69994]: DEBUG nova.network.neutron [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Successfully updated port: 239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.550373] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7808c5d-5c16-4595-b12f-28313511cc43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.576079] env[69994]: DEBUG 
oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 614.576079] env[69994]: value = "task-3241207" [ 614.576079] env[69994]: _type = "Task" [ 614.576079] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.586566] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241207, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.591701] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "f109c803-bf37-4845-8956-4336dbc8a946" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.591948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "f109c803-bf37-4845-8956-4336dbc8a946" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.612183] env[69994]: DEBUG nova.compute.manager [None req-fa19c1cd-1c87-499d-a0bb-d66bbd1d7297 tempest-ServerDiagnosticsV248Test-819149820 tempest-ServerDiagnosticsV248Test-819149820-project-admin] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.614319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae15534d-7e05-4730-8a7c-11ad2219ffb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.624634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.624634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.624760] env[69994]: DEBUG nova.network.neutron [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 614.626944] env[69994]: INFO nova.compute.manager [None 
req-fa19c1cd-1c87-499d-a0bb-d66bbd1d7297 tempest-ServerDiagnosticsV248Test-819149820 tempest-ServerDiagnosticsV248Test-819149820-project-admin] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Retrieving diagnostics [ 614.628349] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdcec7f-d154-4a61-8f58-72a07b316e39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.838970] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 84efe900-1d79-42f9-b3c6-54299757cdbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 614.997143] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.997782] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 614.998160] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.998458] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.998758] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 614.999193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.000047] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 615.000047] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2464a01f-0c47-4c9e-ab74-859a034ecc96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.003583] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8820c8e-e27c-4ca1-9348-b214f99fc5b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.008532] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 615.008532] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526b3e2c-be3d-3b02-8b6b-1bfeb6fe8bca" [ 615.008532] env[69994]: _type = "Task" [ 615.008532] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.017995] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526b3e2c-be3d-3b02-8b6b-1bfeb6fe8bca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.019235] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.019407] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 615.020175] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf85679-7d4a-4ea8-82d0-c79c66d76818 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.027467] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aca27ea-77c4-4577-9624-5effdbd2ce9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.033711] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 615.033711] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523388af-0630-2d88-1b41-a4870c5e7a86" [ 615.033711] env[69994]: _type = "Task" [ 615.033711] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.042922] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523388af-0630-2d88-1b41-a4870c5e7a86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.070043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.071562] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquired lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.071722] env[69994]: DEBUG nova.network.neutron [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.085465] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241207, 'name': ReconfigVM_Task, 'duration_secs': 0.346304} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.086431] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 91666839-f440-499e-acf0-07d352e701ab/91666839-f440-499e-acf0-07d352e701ab.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 615.087139] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8c6c888-e33f-46c2-9156-ef5ca2fad801 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.094373] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 615.094373] env[69994]: value = "task-3241208" [ 615.094373] env[69994]: _type = "Task" [ 615.094373] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.105057] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241208, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.204703] env[69994]: DEBUG nova.network.neutron [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.312065] env[69994]: DEBUG nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 615.349673] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 53a8714c-50f7-4990-a3d9-86f8fc908d03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.359818] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 615.360380] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 615.360380] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 615.360519] env[69994]: DEBUG nova.virt.hardware [None 
req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 615.360564] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 615.361162] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 615.361162] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 615.367833] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 615.367833] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 615.367833] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 615.367833] env[69994]: DEBUG nova.virt.hardware [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 615.367833] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293fd34b-7434-4342-a4f3-beac8848e044 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.395531] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6505450-7352-43bb-811b-5c81dc3194e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.472309] env[69994]: DEBUG nova.network.neutron [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: 
dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance_info_cache with network_info: [{"id": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "address": "fa:16:3e:23:d3:c2", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c4ae184-b8", "ovs_interfaceid": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.526449] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.526449] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.526449] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.532396] env[69994]: DEBUG nova.network.neutron [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Successfully created port: c52664a0-1200-4c1c-9848-50d360e81f40 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.548778] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 615.549075] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Creating 
directory with path [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.549330] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cd4b08a-d505-47ac-bd4c-0f6265c18297 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.568538] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Created directory with path [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.568758] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Fetch image to [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 615.568929] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Downloading image file data cc2e14cc-b12f-480a-a387-dd21e9efda8b to [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk on the data store datastore1 {{(pid=69994) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 615.570015] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380eac91-0a1f-43b0-af71-e6d987b8b744 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.583806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1abf2f-cc0f-4cbf-a895-53e397c2dac7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.599061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "7e7953f7-ed5d-4515-9181-93d343ad772d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.599061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.610746] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a640325-334e-4b77-85ab-c654aca3a031 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.617475] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241208, 'name': Rename_Task, 'duration_secs': 0.149086} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.618122] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 615.618401] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb017acc-f240-404f-a40f-6832f88d7d0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.647024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035541d3-06ac-4ee4-ad04-fc5777f59dea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.651384] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 615.651384] env[69994]: value = "task-3241209" [ 615.651384] env[69994]: _type = "Task" [ 615.651384] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.655989] env[69994]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4c7f5041-1c69-41d0-a1a6-220789a41d3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.663577] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241209, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.686986] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Downloading image file data cc2e14cc-b12f-480a-a387-dd21e9efda8b to the data store datastore1 {{(pid=69994) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 615.695038] env[69994]: DEBUG nova.network.neutron [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.761018] env[69994]: DEBUG oslo_vmware.rw_handles [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 615.853267] env[69994]: DEBUG nova.compute.manager [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Received event network-changed-ce41f31f-0a63-4393-90a3-de51de55789e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 615.853445] env[69994]: DEBUG nova.compute.manager [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Refreshing instance network info cache due to event network-changed-ce41f31f-0a63-4393-90a3-de51de55789e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 615.854887] env[69994]: DEBUG oslo_concurrency.lockutils [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] Acquiring lock "refresh_cache-91666839-f440-499e-acf0-07d352e701ab" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.854887] env[69994]: DEBUG oslo_concurrency.lockutils [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] Acquired lock "refresh_cache-91666839-f440-499e-acf0-07d352e701ab" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.854887] env[69994]: DEBUG nova.network.neutron [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Refreshing network info cache for port ce41f31f-0a63-4393-90a3-de51de55789e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.867782] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 2f710439-0216-401e-9759-af584f9bd00d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.985938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.986283] env[69994]: DEBUG nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Instance network_info: |[{"id": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "address": "fa:16:3e:23:d3:c2", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c4ae184-b8", "ovs_interfaceid": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 615.986872] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:d3:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c4ae184-b8b0-409f-aff4-5568af2af1b9', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 615.997068] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Creating folder: Project (1aa7929b2e0d467c99c25acd8b7e92d6). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 615.997301] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-427428c7-b6a1-42a2-b444-222760457d18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.009570] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Created folder: Project (1aa7929b2e0d467c99c25acd8b7e92d6) in parent group-v647729. 
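[editor's note] The entries above show the vmwareapi driver's basic call pattern against vCenter: plain SOAP calls (Folder.CreateFolder, PropertyCollector.RetrievePropertiesEx) return a result immediately, while *_Task methods such as Folder.CreateVM_Task return a Task object that the driver then polls, which is what produces the "Waiting for the task" and "progress is N%" lines. A minimal sketch of that pattern with oslo.vmware follows; the vCenter host, credentials, retry/poll settings, and the folder and resource-pool references are placeholders, not values taken from this deployment.

    # Sketch only: the invoke_api()/wait_for_task() pattern behind the
    # "Invoking Folder.CreateVM_Task ..." and "Task: {...} progress is N%"
    # entries above. Host, credentials and managed-object references are
    # illustrative placeholders supplied by the caller.
    from oslo_vmware import api as vmware_api

    def make_session(host, user, password):
        # Same session type the log records via VMwareAPISession._create_session.
        return vmware_api.VMwareAPISession(
            host, user, password,
            api_retry_count=10,        # illustrative, not this node's config
            task_poll_interval=0.5)

    def create_minimal_vm(session, vm_folder_ref, res_pool_ref, name='example-vm'):
        factory = session.vim.client.factory
        spec = factory.create('ns0:VirtualMachineConfigSpec')
        spec.name = name
        spec.guestId = 'otherGuest'
        spec.files = factory.create('ns0:VirtualMachineFileInfo')
        spec.files.vmPathName = '[datastore1] ' + name

        # Asynchronous vSphere call: returns a Task managed object.
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                  config=spec, pool=res_pool_ref)
        # Polling the task until success (or raising on error) is what emits
        # the "progress is N%" DEBUG lines.
        task_info = session.wait_for_task(task)
        return task_info.result        # managed-object reference of the new VM

The folder and resource-pool references would normally come from the PropertyCollector.RetrievePropertiesEx lookups that precede these calls in the log.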
[ 616.009792] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Creating folder: Instances. Parent ref: group-v647745. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.013189] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44ffffff-b473-4568-a04b-277df69b53af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.024280] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Created folder: Instances in parent group-v647745. [ 616.024280] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.025698] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 616.025698] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0cf8890-b3fa-4d88-8c8e-69e8d24fad23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.049772] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.049772] env[69994]: value = "task-3241212" [ 616.049772] env[69994]: _type = "Task" [ 616.049772] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.066160] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241212, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.174139] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241209, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.174139] env[69994]: DEBUG nova.network.neutron [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.345531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Acquiring lock "2e374549-00a2-4014-90e0-ceccbe4360fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.345531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.345531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Acquiring lock "2e374549-00a2-4014-90e0-ceccbe4360fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.345531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.348761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.348761] env[69994]: INFO nova.compute.manager [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Terminating instance [ 616.370874] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance b003b7c2-e754-440e-8a65-13c5e9c68cd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.569292] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241212, 'name': CreateVM_Task, 'duration_secs': 0.394227} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.572281] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 616.573040] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.573203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.573717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 616.573816] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e828553a-9fae-43d4-b2d9-3144253a4e7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.578727] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 616.578727] env[69994]: value = 
"session[520ca315-cd17-8670-37df-715bbcc23663]52c0e9be-2839-9c48-b165-9f94c3d22bfa" [ 616.578727] env[69994]: _type = "Task" [ 616.578727] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.580080] env[69994]: DEBUG oslo_vmware.rw_handles [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 616.580080] env[69994]: DEBUG oslo_vmware.rw_handles [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 616.591495] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c0e9be-2839-9c48-b165-9f94c3d22bfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.666499] env[69994]: DEBUG oslo_vmware.api [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241209, 'name': PowerOnVM_Task, 'duration_secs': 0.541312} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.666783] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 616.666980] env[69994]: INFO nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Took 13.13 seconds to spawn the instance on the hypervisor. 
[ 616.667305] env[69994]: DEBUG nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 616.668037] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Downloaded image file data cc2e14cc-b12f-480a-a387-dd21e9efda8b to vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk on the data store datastore1 {{(pid=69994) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 616.669890] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 616.670141] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Copying Virtual Disk [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk to [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 616.670950] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d2bf5d-3ab5-4b40-a7df-762524b19239 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.677157] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b33bf163-faea-4aa3-9872-2531ed546299 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.677157] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Releasing lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.677380] env[69994]: DEBUG nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Instance network_info: |[{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 616.677548] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:40:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '239a38e8-39e8-487b-ba99-cf85c99d41f1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.683916] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Creating folder: Project (5543f4937e604cc189cc63c178705112). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.685628] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2237d7c1-cc15-4548-87c0-4eaac822c8d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.696309] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 616.696309] env[69994]: value = "task-3241213" [ 616.696309] env[69994]: _type = "Task" [ 616.696309] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.707882] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241213, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.711055] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Created folder: Project (5543f4937e604cc189cc63c178705112) in parent group-v647729. [ 616.711430] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Creating folder: Instances. Parent ref: group-v647748. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.712054] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31907d17-d10f-4154-9aae-d72bcf2eddb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.724058] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Created folder: Instances in parent group-v647748. [ 616.724524] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.728165] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 616.728165] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15316335-b264-4d6d-95a9-dede5716361e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.754339] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.754339] env[69994]: value = "task-3241216" [ 616.754339] env[69994]: _type = "Task" [ 616.754339] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.762434] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241216, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.855496] env[69994]: DEBUG nova.compute.manager [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 616.855496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 616.855496] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b70d8b-a974-42f9-858e-9f41adeb0aa0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.866698] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 616.866760] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbbdffb0-31b4-4f8e-a968-3ef260614c5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.877179] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 21f66039-6292-4d9c-b97d-668d029def24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 616.878558] env[69994]: DEBUG oslo_vmware.api [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Waiting for the task: (returnval){ [ 616.878558] env[69994]: value = "task-3241217" [ 616.878558] env[69994]: _type = "Task" [ 616.878558] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.891567] env[69994]: DEBUG oslo_vmware.api [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Task: {'id': task-3241217, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.094209] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.094209] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.094209] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.214817] env[69994]: INFO nova.compute.manager [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Took 22.08 seconds to build instance. [ 617.229384] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241213, 'name': CopyVirtualDisk_Task} progress is 70%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.268622] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241216, 'name': CreateVM_Task, 'duration_secs': 0.462087} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.268873] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 617.269526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.270142] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.270504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.271129] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03a4460d-acba-4e02-b66f-af2c1993ac9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.276039] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 617.276039] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527db131-9e08-a740-7e23-179f1ac4ced6" [ 617.276039] env[69994]: _type = "Task" [ 617.276039] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.287350] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527db131-9e08-a740-7e23-179f1ac4ced6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.382543] env[69994]: DEBUG nova.network.neutron [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Updated VIF entry in instance network info cache for port ce41f31f-0a63-4393-90a3-de51de55789e. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 617.382543] env[69994]: DEBUG nova.network.neutron [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Updating instance_info_cache with network_info: [{"id": "ce41f31f-0a63-4393-90a3-de51de55789e", "address": "fa:16:3e:c2:ac:fe", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce41f31f-0a", "ovs_interfaceid": "ce41f31f-0a63-4393-90a3-de51de55789e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.391131] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance f109c803-bf37-4845-8956-4336dbc8a946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 617.392291] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 617.392291] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 617.401179] env[69994]: DEBUG oslo_vmware.api [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Task: {'id': task-3241217, 'name': PowerOffVM_Task, 'duration_secs': 0.269379} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.401477] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 617.401653] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 617.401911] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22777959-eb67-4dca-996c-8a6efff611d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.479729] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 617.479813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 617.480064] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Deleting the datastore file [datastore2] 2e374549-00a2-4014-90e0-ceccbe4360fa {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 617.480219] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4d7b9dd-27dc-4b0f-b0fe-d4a3aa62ca5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.493675] env[69994]: DEBUG oslo_vmware.api [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Waiting for the task: (returnval){ [ 617.493675] env[69994]: value = "task-3241219" [ 617.493675] env[69994]: _type = "Task" [ 617.493675] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.502715] env[69994]: DEBUG oslo_vmware.api [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Task: {'id': task-3241219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.667499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d085c4d-d335-496b-9f4a-45f91f9a3ded {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.675917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0066b0c5-f924-40c7-b6c7-ec6302627a23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.709262] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee355061-d3f0-4f54-955e-24534d234024 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.721490] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6c750a-fa02-4c23-8c7d-e43c18df5dc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.724511] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e4fa1e84-8a86-4b45-a80b-14271755a943 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "91666839-f440-499e-acf0-07d352e701ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.596s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.735284] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241213, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.978431} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.744948] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Copied Virtual Disk [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk to [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 617.745177] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleting the datastore file [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b/tmp-sparse.vmdk {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 617.745718] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.748325] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d711773-c26b-4a70-9923-a759e9a8b86a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.758815] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "91bb882c-7b84-450f-bd03-91ea1ce739ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.762015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.762015] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 617.762015] env[69994]: value = "task-3241220" [ 617.762015] env[69994]: _type = "Task" [ 617.762015] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.768884] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241220, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.789140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.790531] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.790531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.892037] env[69994]: DEBUG oslo_concurrency.lockutils [req-7dc5e9f4-1f5a-43a9-85b3-7751ae19e2ea req-d076b3a6-bbd8-417d-b4aa-9c274e034415 service nova] Releasing lock "refresh_cache-91666839-f440-499e-acf0-07d352e701ab" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.943745] env[69994]: DEBUG nova.compute.manager [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Received event network-changed-b1e9d712-4ee9-4431-b7ed-f8221fb33b62 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 617.943745] env[69994]: DEBUG nova.compute.manager [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Refreshing instance network info cache due to event network-changed-b1e9d712-4ee9-4431-b7ed-f8221fb33b62. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 617.943745] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Acquiring lock "refresh_cache-2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.943745] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Acquired lock "refresh_cache-2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.943745] env[69994]: DEBUG nova.network.neutron [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Refreshing network info cache for port b1e9d712-4ee9-4431-b7ed-f8221fb33b62 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.012050] env[69994]: DEBUG oslo_vmware.api [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Task: {'id': task-3241219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.443989} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.012050] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 618.012050] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 618.012050] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 618.012050] env[69994]: INFO nova.compute.manager [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Took 1.16 seconds to destroy the instance on the hypervisor. [ 618.012415] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 618.012415] env[69994]: DEBUG nova.compute.manager [-] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 618.012415] env[69994]: DEBUG nova.network.neutron [-] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.236915] env[69994]: DEBUG nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 618.248290] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 618.272574] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068004} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.274651] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 618.274651] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Moving file from [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45/cc2e14cc-b12f-480a-a387-dd21e9efda8b to [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b. {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 618.275028] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-9e6b5f80-4212-45c7-9841-f4e279e6b887 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.285386] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 618.285386] env[69994]: value = "task-3241221" [ 618.285386] env[69994]: _type = "Task" [ 618.285386] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.301026] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241221, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.610037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "5badecfd-5784-4968-8519-419a01c67465" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.610275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.755966] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 618.760408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.475s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.760408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.479s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.760408] env[69994]: INFO nova.compute.claims [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.770065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.803815] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241221, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.03255} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.803815] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] File moved {{(pid=69994) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 618.803815] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Cleaning up location [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 618.803815] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleting the datastore file [datastore1] vmware_temp/bb2247b3-a016-4b3a-b11b-611aa3b9ed45 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 618.803815] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03b1c038-54cd-4106-b72d-644aaf5f4d35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.809080] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 618.809080] env[69994]: value = "task-3241222" [ 618.809080] env[69994]: _type = "Task" [ 618.809080] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.820423] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.042528] env[69994]: DEBUG nova.network.neutron [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Successfully updated port: c52664a0-1200-4c1c-9848-50d360e81f40 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 619.127874] env[69994]: DEBUG nova.network.neutron [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Updated VIF entry in instance network info cache for port b1e9d712-4ee9-4431-b7ed-f8221fb33b62. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 619.128567] env[69994]: DEBUG nova.network.neutron [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Updating instance_info_cache with network_info: [{"id": "b1e9d712-4ee9-4431-b7ed-f8221fb33b62", "address": "fa:16:3e:3c:eb:59", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1e9d712-4e", "ovs_interfaceid": "b1e9d712-4ee9-4431-b7ed-f8221fb33b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.139643] env[69994]: DEBUG nova.network.neutron [-] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.324601] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025805} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.324894] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 619.327510] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3e44c3d-9367-4c8d-b012-9904fcf0d3c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.333262] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 619.333262] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529cfe16-fa42-fb23-d1ef-cfde92bf305b" [ 619.333262] env[69994]: _type = "Task" [ 619.333262] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.346365] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529cfe16-fa42-fb23-d1ef-cfde92bf305b, 'name': SearchDatastore_Task, 'duration_secs': 0.009525} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.346694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.347143] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2/2244e8ad-75f6-42bc-a97d-7f26eaba1aa2.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.347244] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.347349] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.347561] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a7a66fb-1f6b-43e4-be12-55be75f8bb7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.349957] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ed95e1f-be6b-47e8-8755-b2096de9118a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.357647] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 619.357647] env[69994]: value = "task-3241223" [ 619.357647] env[69994]: _type = "Task" [ 619.357647] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.358938] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.359270] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 619.363145] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-717d2885-e391-44e8-9c0a-b3c2a40b3dc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.367901] env[69994]: DEBUG nova.compute.manager [None req-d63ce4cf-429f-444b-8af7-bbc89443f957 tempest-ServerDiagnosticsTest-1027841842 tempest-ServerDiagnosticsTest-1027841842-project-admin] [instance: 91666839-f440-499e-acf0-07d352e701ab] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.369728] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929cef4c-94bf-4181-b0f3-0e3ace3eec50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.379605] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 619.379605] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f8d574-4bb9-be38-4140-8517a4fec7e4" [ 619.379605] env[69994]: _type = "Task" [ 619.379605] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.379832] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.391483] env[69994]: INFO nova.compute.manager [None req-d63ce4cf-429f-444b-8af7-bbc89443f957 tempest-ServerDiagnosticsTest-1027841842 tempest-ServerDiagnosticsTest-1027841842-project-admin] [instance: 91666839-f440-499e-acf0-07d352e701ab] Retrieving diagnostics [ 619.393078] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69a4218-5741-460c-ab3b-b453dd65285f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.407237] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f8d574-4bb9-be38-4140-8517a4fec7e4, 'name': SearchDatastore_Task, 'duration_secs': 0.013004} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.408749] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a96647f-b3b3-41b9-9e60-590e584aa361 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.444196] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 619.444196] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525d365d-7082-7d44-eac7-c38720038d2e" [ 619.444196] env[69994]: _type = "Task" [ 619.444196] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.452295] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525d365d-7082-7d44-eac7-c38720038d2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.547521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "refresh_cache-6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.547672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquired lock "refresh_cache-6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.547821] env[69994]: DEBUG nova.network.neutron [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.634731] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Releasing lock "refresh_cache-2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.635078] env[69994]: DEBUG nova.compute.manager [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Received event network-vif-plugged-1c4ae184-b8b0-409f-aff4-5568af2af1b9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.635289] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Acquiring lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.635492] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.635659] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.635819] env[69994]: DEBUG nova.compute.manager [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] No waiting events found dispatching network-vif-plugged-1c4ae184-b8b0-409f-aff4-5568af2af1b9 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 619.635981] env[69994]: WARNING nova.compute.manager [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Received unexpected event network-vif-plugged-1c4ae184-b8b0-409f-aff4-5568af2af1b9 for instance with vm_state building and task_state spawning. [ 619.636214] env[69994]: DEBUG nova.compute.manager [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Received event network-changed-1c4ae184-b8b0-409f-aff4-5568af2af1b9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.636319] env[69994]: DEBUG nova.compute.manager [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Refreshing instance network info cache due to event network-changed-1c4ae184-b8b0-409f-aff4-5568af2af1b9. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 619.636504] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Acquiring lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.636690] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Acquired lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.636830] env[69994]: DEBUG nova.network.neutron [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Refreshing network info cache for port 1c4ae184-b8b0-409f-aff4-5568af2af1b9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.642893] env[69994]: INFO nova.compute.manager [-] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Took 1.63 seconds to deallocate network for instance. 
[ 619.685449] env[69994]: DEBUG nova.compute.manager [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Received event network-vif-plugged-239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.685449] env[69994]: DEBUG oslo_concurrency.lockutils [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] Acquiring lock "15d17772-ac57-49a3-b261-bf49b902f658-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.685449] env[69994]: DEBUG oslo_concurrency.lockutils [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] Lock "15d17772-ac57-49a3-b261-bf49b902f658-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.685570] env[69994]: DEBUG oslo_concurrency.lockutils [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] Lock "15d17772-ac57-49a3-b261-bf49b902f658-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.686281] env[69994]: DEBUG nova.compute.manager [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] No waiting events found dispatching network-vif-plugged-239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 619.686281] env[69994]: WARNING nova.compute.manager [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Received unexpected event network-vif-plugged-239a38e8-39e8-487b-ba99-cf85c99d41f1 for instance with vm_state building and task_state spawning. [ 619.686281] env[69994]: DEBUG nova.compute.manager [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Received event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.686502] env[69994]: DEBUG nova.compute.manager [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing instance network info cache due to event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 619.686599] env[69994]: DEBUG oslo_concurrency.lockutils [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] Acquiring lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.686769] env[69994]: DEBUG oslo_concurrency.lockutils [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] Acquired lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.687516] env[69994]: DEBUG nova.network.neutron [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.871223] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241223, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480042} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.871496] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2/2244e8ad-75f6-42bc-a97d-7f26eaba1aa2.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 619.871718] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 619.871969] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-339bfd12-0870-47db-bcdc-c69fc2e5dfef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.878940] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 619.878940] env[69994]: value = "task-3241224" [ 619.878940] env[69994]: _type = "Task" [ 619.878940] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.889663] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241224, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.963386] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525d365d-7082-7d44-eac7-c38720038d2e, 'name': SearchDatastore_Task, 'duration_secs': 0.008126} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.963660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.963927] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e9bc15f9-e957-487f-b8d5-d1332b185dcf/e9bc15f9-e957-487f-b8d5-d1332b185dcf.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.964222] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.964406] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.964621] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65585db6-f231-410a-90df-f3e5103f31ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.968179] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0fc7d36d-c6f4-42ab-8075-2f9ae48f646b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.981743] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 619.981743] env[69994]: value = "task-3241225" [ 619.981743] env[69994]: _type = "Task" [ 619.981743] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.990040] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.990111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.990311] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.990464] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 619.992129] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8e2cfc4-9a7d-4d71-ad22-219bfdacd56a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.999194] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241225, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.003538] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 620.003538] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524d7b22-43a5-8d85-8bae-dea0a44e90ec" [ 620.003538] env[69994]: _type = "Task" [ 620.003538] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.013201] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524d7b22-43a5-8d85-8bae-dea0a44e90ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009573} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.013950] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80c4419f-626b-410d-8810-415bf5492671 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.024575] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 620.024575] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5224d7da-16a8-2638-94f7-d6c00f36626f" [ 620.024575] env[69994]: _type = "Task" [ 620.024575] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.033914] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5224d7da-16a8-2638-94f7-d6c00f36626f, 'name': SearchDatastore_Task, 'duration_secs': 0.007788} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.035739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.036542] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] dbad6bed-64ba-4dfd-abad-c0b2c775ba2c/dbad6bed-64ba-4dfd-abad-c0b2c775ba2c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 620.036542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.036801] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.037498] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c5badfc-8700-4ab1-b1f9-74df1b8cfa3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.039334] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2a11b52c-62e0-4bbe-bb36-1b95943caf89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.052013] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 620.052013] env[69994]: value = "task-3241226" [ 620.052013] env[69994]: _type = "Task" [ 620.052013] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.063199] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.063414] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.068245] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3808bd95-72b7-4b71-8d06-87d67f31f1c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.078425] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241226, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.082248] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 620.082248] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a58fce-a592-fe60-df67-197c85afd7f2" [ 620.082248] env[69994]: _type = "Task" [ 620.082248] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.096324] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a58fce-a592-fe60-df67-197c85afd7f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.147181] env[69994]: DEBUG nova.network.neutron [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.154954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.164685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4980ad2c-bc32-4de6-8707-5a9f7b9409e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.172304] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5827e781-a9d8-463e-9e1f-50d38e55a061 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.207905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0596fa36-d612-406a-b6b8-0ae58b6c8c05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.215725] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3666b7-a862-4935-bdde-b7020b7d06eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.232922] env[69994]: DEBUG nova.compute.provider_tree [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.398184] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068117} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.398582] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.399489] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df0e696-245f-49d4-a7a7-553bfed5ab2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.428040] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2/2244e8ad-75f6-42bc-a97d-7f26eaba1aa2.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.430847] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4be40583-9eee-4559-b37a-9395356714d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.452419] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 620.452419] env[69994]: value = "task-3241227" [ 620.452419] env[69994]: _type = "Task" [ 620.452419] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.467678] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241227, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.493056] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241225, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.563716] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241226, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.595662] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a58fce-a592-fe60-df67-197c85afd7f2, 'name': SearchDatastore_Task, 'duration_secs': 0.009749} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.597405] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bec78ce-5c1d-4bc0-a7f2-2c625bfaeecd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.605193] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 620.605193] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d33a84-b9a1-39ad-8b8b-308b82fc6085" [ 620.605193] env[69994]: _type = "Task" [ 620.605193] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.619307] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d33a84-b9a1-39ad-8b8b-308b82fc6085, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.737394] env[69994]: DEBUG nova.scheduler.client.report [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.905601] env[69994]: DEBUG nova.network.neutron [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Updating instance_info_cache with network_info: [{"id": "c52664a0-1200-4c1c-9848-50d360e81f40", "address": "fa:16:3e:07:2b:74", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc52664a0-12", "ovs_interfaceid": "c52664a0-1200-4c1c-9848-50d360e81f40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.962291] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.972984] env[69994]: DEBUG nova.network.neutron [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updated VIF entry in instance network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.973522] env[69994]: DEBUG nova.network.neutron [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.994073] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521227} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.994327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e9bc15f9-e957-487f-b8d5-d1332b185dcf/e9bc15f9-e957-487f-b8d5-d1332b185dcf.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 620.994530] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.994772] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d134dae-16de-42be-ab72-49d992ff165b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.000955] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 621.000955] env[69994]: value = "task-3241228" [ 621.000955] env[69994]: _type = "Task" [ 621.000955] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.010946] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241228, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.061112] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241226, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80692} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.061513] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] dbad6bed-64ba-4dfd-abad-c0b2c775ba2c/dbad6bed-64ba-4dfd-abad-c0b2c775ba2c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 621.061760] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 621.062060] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-748ea7a1-0db5-4b1b-a831-2e0e065a0c5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.068332] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 621.068332] env[69994]: value = "task-3241229" [ 621.068332] env[69994]: _type = "Task" [ 621.068332] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.074205] env[69994]: DEBUG nova.network.neutron [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updated VIF entry in instance network info cache for port 1c4ae184-b8b0-409f-aff4-5568af2af1b9. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 621.074205] env[69994]: DEBUG nova.network.neutron [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance_info_cache with network_info: [{"id": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "address": "fa:16:3e:23:d3:c2", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c4ae184-b8", "ovs_interfaceid": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.080568] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241229, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.116739] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d33a84-b9a1-39ad-8b8b-308b82fc6085, 'name': SearchDatastore_Task, 'duration_secs': 0.06097} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.117035] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.117290] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 15d17772-ac57-49a3-b261-bf49b902f658/15d17772-ac57-49a3-b261-bf49b902f658.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 621.117536] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2607ea6f-7bdf-4395-b69b-971880bd464e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.124862] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 621.124862] env[69994]: value = "task-3241230" [ 621.124862] env[69994]: _type = "Task" [ 621.124862] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.133818] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.249054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.249054] env[69994]: DEBUG nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 621.249054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.870s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.251360] env[69994]: INFO nova.compute.claims [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.408801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Releasing lock "refresh_cache-6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.408801] env[69994]: DEBUG nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Instance network_info: |[{"id": "c52664a0-1200-4c1c-9848-50d360e81f40", "address": "fa:16:3e:07:2b:74", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc52664a0-12", "ovs_interfaceid": "c52664a0-1200-4c1c-9848-50d360e81f40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 621.409199] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:2b:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c52664a0-1200-4c1c-9848-50d360e81f40', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.417117] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 
tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Creating folder: Project (d94527f3e4404496ba30fa03ab0f3888). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.417375] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d451628-fccb-43fe-a52f-b5565c29f97c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.430863] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Created folder: Project (d94527f3e4404496ba30fa03ab0f3888) in parent group-v647729. [ 621.432393] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Creating folder: Instances. Parent ref: group-v647751. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.432393] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-878d55ad-362d-472a-8fa5-dd7840162fc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.442209] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Created folder: Instances in parent group-v647751. [ 621.442377] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 621.443602] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 621.443602] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c65ff882-3d02-4aec-9d7c-0e7352ee3edb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.467706] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.467706] env[69994]: value = "task-3241233" [ 621.467706] env[69994]: _type = "Task" [ 621.467706] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.474245] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241227, 'name': ReconfigVM_Task, 'duration_secs': 0.613087} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.474980] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2/2244e8ad-75f6-42bc-a97d-7f26eaba1aa2.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.475929] env[69994]: DEBUG oslo_concurrency.lockutils [req-cdf75fbf-64c0-46f6-9b7d-86a5a3ffa738 req-ba66ade5-c47d-4098-b000-cf65b63f5501 service nova] Releasing lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.476315] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d0b38fc-dd6f-4923-b7a2-224d4b0aac7f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.481784] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241233, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.489071] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 621.489071] env[69994]: value = "task-3241234" [ 621.489071] env[69994]: _type = "Task" [ 621.489071] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.500251] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241234, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.512978] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241228, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071232} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.513433] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 621.514389] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9b640f-d770-432b-9017-7a09ae2f2662 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.543185] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] e9bc15f9-e957-487f-b8d5-d1332b185dcf/e9bc15f9-e957-487f-b8d5-d1332b185dcf.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 621.543185] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1eafca9-9b66-43c3-add1-a2516221c535 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.562202] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 621.562202] env[69994]: value = "task-3241235" [ 621.562202] env[69994]: _type = "Task" [ 621.562202] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.573693] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241235, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.584415] env[69994]: DEBUG oslo_concurrency.lockutils [req-872f0221-fce5-408c-9ecf-2006ac922c7d req-5c7843d5-feee-42e6-950a-f7a65a856ac9 service nova] Releasing lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.584901] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241229, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164237} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.585149] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 621.586007] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbd0431-24b4-43a0-8c19-b85411810390 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.614870] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] dbad6bed-64ba-4dfd-abad-c0b2c775ba2c/dbad6bed-64ba-4dfd-abad-c0b2c775ba2c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 621.614870] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66d5f06f-3874-453c-9a35-6664b7881c66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.646866] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241230, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.646866] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 621.646866] env[69994]: value = "task-3241236" [ 621.646866] env[69994]: _type = "Task" [ 621.646866] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.654903] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241236, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.756633] env[69994]: DEBUG nova.compute.utils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 621.762238] env[69994]: DEBUG nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 621.872306] env[69994]: DEBUG nova.compute.manager [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Received event network-vif-plugged-c52664a0-1200-4c1c-9848-50d360e81f40 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 621.872551] env[69994]: DEBUG oslo_concurrency.lockutils [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] Acquiring lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.872715] env[69994]: DEBUG oslo_concurrency.lockutils [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.872883] env[69994]: DEBUG oslo_concurrency.lockutils [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.875064] env[69994]: DEBUG nova.compute.manager [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] No waiting events found dispatching network-vif-plugged-c52664a0-1200-4c1c-9848-50d360e81f40 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 621.875409] env[69994]: WARNING nova.compute.manager [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Received unexpected event network-vif-plugged-c52664a0-1200-4c1c-9848-50d360e81f40 for instance with vm_state building and task_state spawning. [ 621.875494] env[69994]: DEBUG nova.compute.manager [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Received event network-changed-c52664a0-1200-4c1c-9848-50d360e81f40 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 621.875652] env[69994]: DEBUG nova.compute.manager [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Refreshing instance network info cache due to event network-changed-c52664a0-1200-4c1c-9848-50d360e81f40. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 621.875868] env[69994]: DEBUG oslo_concurrency.lockutils [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] Acquiring lock "refresh_cache-6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.875979] env[69994]: DEBUG oslo_concurrency.lockutils [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] Acquired lock "refresh_cache-6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.876145] env[69994]: DEBUG nova.network.neutron [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Refreshing network info cache for port c52664a0-1200-4c1c-9848-50d360e81f40 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 621.980075] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241233, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.000668] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241234, 'name': Rename_Task, 'duration_secs': 0.360146} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.000668] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 622.000668] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-626f1129-f6ef-40df-82ed-9b1623b6fe80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.007031] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 622.007031] env[69994]: value = "task-3241237" [ 622.007031] env[69994]: _type = "Task" [ 622.007031] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.015904] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241237, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.074750] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241235, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.142187] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519545} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.142745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 15d17772-ac57-49a3-b261-bf49b902f658/15d17772-ac57-49a3-b261-bf49b902f658.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 622.144634] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 622.146590] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7c70897-8cb8-4c42-aed2-f76565427984 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.160158] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241236, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.161276] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 622.161276] env[69994]: value = "task-3241238" [ 622.161276] env[69994]: _type = "Task" [ 622.161276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.171105] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241238, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.261112] env[69994]: DEBUG nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 622.440837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "91666839-f440-499e-acf0-07d352e701ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.441086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "91666839-f440-499e-acf0-07d352e701ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.441292] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "91666839-f440-499e-acf0-07d352e701ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.441482] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "91666839-f440-499e-acf0-07d352e701ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.441648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "91666839-f440-499e-acf0-07d352e701ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.447072] env[69994]: INFO nova.compute.manager [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Terminating instance [ 622.483277] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241233, 'name': CreateVM_Task, 'duration_secs': 0.723271} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.483277] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 622.484619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.484619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.484619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 622.485664] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b89cbc34-06df-4507-bc4a-fd3d5c60b8d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.490121] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 622.490121] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5221e864-28fd-0cca-918c-c77288c2fc20" [ 622.490121] env[69994]: _type = "Task" [ 622.490121] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.498247] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5221e864-28fd-0cca-918c-c77288c2fc20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.514453] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241237, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.575528] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241235, 'name': ReconfigVM_Task, 'duration_secs': 0.605135} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.581369] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Reconfigured VM instance instance-00000007 to attach disk [datastore1] e9bc15f9-e957-487f-b8d5-d1332b185dcf/e9bc15f9-e957-487f-b8d5-d1332b185dcf.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.582594] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f3a3798-0181-4d2f-a56f-d0420526a671 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.591015] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 622.591015] env[69994]: value = "task-3241239" [ 622.591015] env[69994]: _type = "Task" [ 622.591015] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.598687] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241239, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.600808] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73082650-b5b0-41a6-859a-90c705aa338b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.608443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e460f22-ece8-4f47-99bd-5d0d3b10f3bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.651022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44ddcbd-7157-49b1-8e4d-94878f9a197c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.673251] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a35283-8b64-4cf2-a3fa-db1ba0c4a713 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.678569] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241236, 'name': ReconfigVM_Task, 'duration_secs': 0.574473} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.678569] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Reconfigured VM instance instance-00000005 to attach disk [datastore1] dbad6bed-64ba-4dfd-abad-c0b2c775ba2c/dbad6bed-64ba-4dfd-abad-c0b2c775ba2c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.679720] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27df99a1-dc67-434e-a4cf-3936f2598aeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.693569] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241238, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156815} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.694156] env[69994]: DEBUG nova.compute.provider_tree [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.698360] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 622.698360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e30be9-7e84-4121-86af-795d8758ac65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.700645] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 622.700645] env[69994]: value = "task-3241240" [ 622.700645] env[69994]: _type = "Task" [ 622.700645] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.727325] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 15d17772-ac57-49a3-b261-bf49b902f658/15d17772-ac57-49a3-b261-bf49b902f658.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 622.728286] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-668e76ce-33de-4793-b813-a4295f05af2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.747773] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241240, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.753920] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 622.753920] env[69994]: value = "task-3241241" [ 622.753920] env[69994]: _type = "Task" [ 622.753920] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.765814] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241241, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.868619] env[69994]: DEBUG nova.network.neutron [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Updated VIF entry in instance network info cache for port c52664a0-1200-4c1c-9848-50d360e81f40. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 622.868983] env[69994]: DEBUG nova.network.neutron [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Updating instance_info_cache with network_info: [{"id": "c52664a0-1200-4c1c-9848-50d360e81f40", "address": "fa:16:3e:07:2b:74", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc52664a0-12", "ovs_interfaceid": "c52664a0-1200-4c1c-9848-50d360e81f40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.953161] env[69994]: DEBUG nova.compute.manager [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.953161] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.954389] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab30c9ab-362a-4770-8a32-0998c2405602 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.963202] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.963456] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-228e27cf-a24c-49dc-ad75-f4ca0b6019a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.972023] env[69994]: DEBUG oslo_vmware.api [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 622.972023] env[69994]: value = "task-3241242" [ 622.972023] env[69994]: _type = "Task" [ 622.972023] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.980325] env[69994]: DEBUG oslo_vmware.api [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241242, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.000624] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5221e864-28fd-0cca-918c-c77288c2fc20, 'name': SearchDatastore_Task, 'duration_secs': 0.017238} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.000624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.000624] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.000624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.000963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.000963] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.000963] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1264c28-5299-400d-aa83-78ffb3f7d155 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.011031] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.011172] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.012486] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f9cd2ff-0147-4e85-ae82-2b8ce3462c3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.018211] env[69994]: DEBUG oslo_vmware.api [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241237, 'name': PowerOnVM_Task, 'duration_secs': 0.834523} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.018811] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 623.018811] env[69994]: INFO nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Took 17.19 seconds to spawn the instance on the hypervisor. [ 623.019101] env[69994]: DEBUG nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 623.019764] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3355e948-8644-438a-9c12-51645318f668 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.024074] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 623.024074] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e07ef-7b81-7bc0-7fa5-a4284dbf068b" [ 623.024074] env[69994]: _type = "Task" [ 623.024074] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.035319] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524e07ef-7b81-7bc0-7fa5-a4284dbf068b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.082898] env[69994]: DEBUG nova.compute.manager [req-0d123834-cb94-408b-9d89-3b6bdebf0ea6 req-a82070b6-56ec-42aa-9186-36f74c1149ea service nova] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Received event network-vif-deleted-de9c0db5-bb4c-466b-ab75-f2d6b988725d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 623.102488] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241239, 'name': Rename_Task, 'duration_secs': 0.136725} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.102864] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 623.103168] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-969cf04e-805d-4c70-914a-d0ee9718c084 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.110957] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 623.110957] env[69994]: value = "task-3241243" [ 623.110957] env[69994]: _type = "Task" [ 623.110957] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.119750] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241243, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.201413] env[69994]: DEBUG nova.scheduler.client.report [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 623.220679] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241240, 'name': Rename_Task, 'duration_secs': 0.163917} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.220679] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 623.221626] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ceeb3eb-02ab-4c84-894e-0f834625c7a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.228616] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 623.228616] env[69994]: value = "task-3241244" [ 623.228616] env[69994]: _type = "Task" [ 623.228616] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.247196] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.264112] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241241, 'name': ReconfigVM_Task, 'duration_secs': 0.466746} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.264447] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 15d17772-ac57-49a3-b261-bf49b902f658/15d17772-ac57-49a3-b261-bf49b902f658.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 623.265195] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abee1a64-8486-41b1-afcf-689dfc7d4b91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.272086] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 623.272086] env[69994]: value = "task-3241245" [ 623.272086] env[69994]: _type = "Task" [ 623.272086] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.281390] env[69994]: DEBUG nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 623.291832] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241245, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.314222] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 623.314494] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.314663] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 623.314848] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.315031] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 623.315173] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 623.315496] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 623.315609] env[69994]: DEBUG nova.virt.hardware [None 
req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 623.315810] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 623.315975] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 623.316175] env[69994]: DEBUG nova.virt.hardware [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 623.317096] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb02e00a-abeb-4474-bcae-bda06c8b04cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.327718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbde5dc-0299-4d30-8ad2-1c54e09b6204 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.350966] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 623.361566] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 623.362372] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 623.362372] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d938a967-651c-46c7-b4b7-3aedc1052a11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.377717] env[69994]: DEBUG oslo_concurrency.lockutils [req-83b16aa6-43e5-452d-a962-77f37aeda1c8 req-29c61e42-bd87-42a1-81d8-6c82030d015b service nova] Releasing lock "refresh_cache-6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.383098] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 623.383098] env[69994]: value = "task-3241246" [ 623.383098] env[69994]: _type = "Task" [ 623.383098] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.392835] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241246, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.491069] env[69994]: DEBUG oslo_vmware.api [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241242, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.521105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "aeb7928a-8307-49e7-b019-a4c674e6369a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.521428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "aeb7928a-8307-49e7-b019-a4c674e6369a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.543285] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524e07ef-7b81-7bc0-7fa5-a4284dbf068b, 'name': SearchDatastore_Task, 'duration_secs': 0.015805} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.545871] env[69994]: INFO nova.compute.manager [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Took 26.87 seconds to build instance. 
[ 623.548346] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98652d40-5eed-4137-a72f-7e0cc6876157 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.552840] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 623.552840] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5257ffd0-9e15-5609-1f11-f6df334de144" [ 623.552840] env[69994]: _type = "Task" [ 623.552840] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.563448] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5257ffd0-9e15-5609-1f11-f6df334de144, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.626345] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241243, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.714375] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.716061] env[69994]: DEBUG nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 623.723592] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.240s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.723939] env[69994]: INFO nova.compute.claims [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.746804] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241244, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.782320] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241245, 'name': Rename_Task, 'duration_secs': 0.23611} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.782593] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 623.782822] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c7219c1-f470-4e0e-a15d-c3308054f66f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.790277] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 623.790277] env[69994]: value = "task-3241247" [ 623.790277] env[69994]: _type = "Task" [ 623.790277] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.800800] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.893697] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241246, 'name': CreateVM_Task, 'duration_secs': 0.322598} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.893697] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.894290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.894290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.894862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 623.894862] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e41bf352-36d2-430c-8e5e-c8f2450c23aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.902866] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 623.902866] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5223ee67-bbfc-0911-b881-8c194d03188d" [ 623.902866] env[69994]: _type = "Task" [ 623.902866] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.911730] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5223ee67-bbfc-0911-b881-8c194d03188d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.987450] env[69994]: DEBUG oslo_vmware.api [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241242, 'name': PowerOffVM_Task, 'duration_secs': 0.560011} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.987661] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.987869] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.988161] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8482b5fb-f827-40c2-b20b-db09e9c71e76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.050743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-192a3232-6996-4a77-b561-c61e9fdaab2f tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.392s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.054200] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 624.054455] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 624.054642] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Deleting the datastore file [datastore2] 91666839-f440-499e-acf0-07d352e701ab {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.057951] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b26a7c98-d27c-4641-a29d-c47cfb3f7c7f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.066172] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5257ffd0-9e15-5609-1f11-f6df334de144, 'name': SearchDatastore_Task, 'duration_secs': 0.009686} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.068057] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.068324] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b/6ca85dc6-ace9-4c5e-a11e-a3d5060d766b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 624.068905] env[69994]: DEBUG oslo_vmware.api [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for the task: (returnval){ [ 624.068905] env[69994]: value = "task-3241249" [ 624.068905] env[69994]: _type = "Task" [ 624.068905] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.069282] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39b4678b-44c2-4095-a8cd-37b4dd136b5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.079685] env[69994]: DEBUG oslo_vmware.api [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241249, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.081147] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 624.081147] env[69994]: value = "task-3241250" [ 624.081147] env[69994]: _type = "Task" [ 624.081147] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.089049] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.121158] env[69994]: DEBUG oslo_vmware.api [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241243, 'name': PowerOnVM_Task, 'duration_secs': 0.594426} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.121158] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 624.121258] env[69994]: INFO nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Took 11.28 seconds to spawn the instance on the hypervisor. [ 624.121413] env[69994]: DEBUG nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 624.122290] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d1f568-d692-471d-8290-0bc3c8396eaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.234375] env[69994]: DEBUG nova.compute.utils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 624.236693] env[69994]: DEBUG nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 624.236936] env[69994]: DEBUG nova.network.neutron [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 624.252290] env[69994]: DEBUG oslo_vmware.api [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241244, 'name': PowerOnVM_Task, 'duration_secs': 0.676999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.252510] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 624.252683] env[69994]: INFO nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Took 16.12 seconds to spawn the instance on the hypervisor. 
[ 624.252871] env[69994]: DEBUG nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 624.253915] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482d4971-d9b0-498c-92af-b5e9b10a18e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.301540] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241247, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.380483] env[69994]: DEBUG nova.policy [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ea53683cc254f28aa0e98a6b9ba5cfd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3405ff9c38dd46ba98df2d9a835ed860', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 624.415895] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5223ee67-bbfc-0911-b881-8c194d03188d, 'name': SearchDatastore_Task, 'duration_secs': 0.030688} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.416229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.416551] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 624.416838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.417063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.417271] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 624.417573] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eaf54164-b7c6-4806-92fc-96645d647957 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.429250] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 624.429530] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 624.430263] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-935bd47e-c5ca-4333-9d1e-9b09598e2ec3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.436487] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 624.436487] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522a3f0f-ee24-a10b-9d60-a9a364a01e86" [ 624.436487] env[69994]: _type = "Task" [ 624.436487] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.445496] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522a3f0f-ee24-a10b-9d60-a9a364a01e86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.554130] env[69994]: DEBUG nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 624.583072] env[69994]: DEBUG oslo_vmware.api [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Task: {'id': task-3241249, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176983} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.585020] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 624.585620] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 624.585620] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.585620] env[69994]: INFO nova.compute.manager [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] [instance: 91666839-f440-499e-acf0-07d352e701ab] Took 1.63 seconds to destroy the instance on the hypervisor. 
[ 624.585881] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.586028] env[69994]: DEBUG nova.compute.manager [-] [instance: 91666839-f440-499e-acf0-07d352e701ab] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 624.586116] env[69994]: DEBUG nova.network.neutron [-] [instance: 91666839-f440-499e-acf0-07d352e701ab] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.592771] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241250, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.643275] env[69994]: INFO nova.compute.manager [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Took 22.19 seconds to build instance. [ 624.740331] env[69994]: DEBUG nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 624.782427] env[69994]: INFO nova.compute.manager [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Took 28.07 seconds to build instance. [ 624.813345] env[69994]: DEBUG oslo_vmware.api [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241247, 'name': PowerOnVM_Task, 'duration_secs': 0.849878} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.813666] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 624.813781] env[69994]: INFO nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Took 14.28 seconds to spawn the instance on the hypervisor. 
[ 624.813969] env[69994]: DEBUG nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 624.814966] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abaa876b-9bef-4a78-a8de-581d5919211d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.950572] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522a3f0f-ee24-a10b-9d60-a9a364a01e86, 'name': SearchDatastore_Task, 'duration_secs': 0.02605} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.956964] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba764274-49c2-462d-8e89-2f66a841c35e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.962820] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 624.962820] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d9b0e9-f462-ab7a-eaa6-6dfee8b93fd8" [ 624.962820] env[69994]: _type = "Task" [ 624.962820] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.974519] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d9b0e9-f462-ab7a-eaa6-6dfee8b93fd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.071602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.074359] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0070f81-32f9-466c-bacf-4f79858e739e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.082359] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d8b454-b498-4578-9bd1-8b10b693e277 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.092643] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516612} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.117943] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b/6ca85dc6-ace9-4c5e-a11e-a3d5060d766b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 625.118238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 625.119070] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85286367-ad4a-4021-8f22-49b1f790cd5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.121547] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d0c003-98bb-4fa6-8e17-bc2b7e998f21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.129194] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6253ccdb-bbbc-4d46-b17f-3a911f1a4a33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.136588] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 625.136588] env[69994]: value = "task-3241251" [ 625.136588] 
env[69994]: _type = "Task" [ 625.136588] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.151998] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6feba0e0-a55e-4db2-8fb3-69cc3834c6b0 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.707s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.152541] env[69994]: DEBUG nova.compute.provider_tree [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.158519] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241251, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.287182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a25561d-50e0-48eb-b991-faa7e12840eb tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.579s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.341348] env[69994]: INFO nova.compute.manager [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Took 24.35 seconds to build instance. [ 625.444584] env[69994]: DEBUG nova.network.neutron [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Successfully created port: be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 625.457903] env[69994]: DEBUG nova.network.neutron [-] [instance: 91666839-f440-499e-acf0-07d352e701ab] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.476993] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d9b0e9-f462-ab7a-eaa6-6dfee8b93fd8, 'name': SearchDatastore_Task, 'duration_secs': 0.060575} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.478849] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.479191] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 625.483331] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e237b3d7-3901-4433-a3ab-c6d8b3390cc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.486503] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "1d548f54-4ffa-4299-9212-717350558ad4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.486762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "1d548f54-4ffa-4299-9212-717350558ad4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.492017] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 625.492017] env[69994]: value = "task-3241252" [ 625.492017] env[69994]: _type = "Task" [ 625.492017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.501939] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241252, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.648162] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063323} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.648798] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.649830] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68db1c4c-b72f-46aa-a7f2-b4899effa2d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.671232] env[69994]: DEBUG nova.scheduler.client.report [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 625.674534] env[69994]: DEBUG nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 625.687227] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b/6ca85dc6-ace9-4c5e-a11e-a3d5060d766b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.688242] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-039c4856-d868-43c2-afee-f4c0d078d2d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.715105] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 625.715105] env[69994]: value = "task-3241253" [ 625.715105] env[69994]: _type = "Task" [ 625.715105] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.724587] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241253, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.751956] env[69994]: DEBUG nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 625.789753] env[69994]: DEBUG nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 625.794911] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 625.795179] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 625.795341] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 625.795528] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 625.795677] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 625.795824] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 625.796317] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 625.796317] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 625.796406] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 625.796511] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 625.796683] env[69994]: DEBUG nova.virt.hardware [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 625.797738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52475962-bcc6-4fa6-b977-ab54250f7791 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.806234] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81139f79-dcd1-466b-bd0d-6c646c495d17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.841681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4f45106-72f8-4f8f-b13e-28c03c6eacd6 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "15d17772-ac57-49a3-b261-bf49b902f658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.863s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.963293] env[69994]: INFO nova.compute.manager [-] [instance: 91666839-f440-499e-acf0-07d352e701ab] Took 1.38 seconds to deallocate network for instance. [ 626.008326] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241252, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.192043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.192622] env[69994]: DEBUG nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 626.198171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.874s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.202218] env[69994]: INFO nova.compute.claims [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 626.228516] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241253, 'name': ReconfigVM_Task, 'duration_secs': 0.443339} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.229813] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.233446] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b/6ca85dc6-ace9-4c5e-a11e-a3d5060d766b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 626.234279] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-883dd585-7365-429d-b029-2c3ce6cb4db7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.241065] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 626.241065] env[69994]: value = "task-3241254" [ 626.241065] env[69994]: _type = "Task" [ 626.241065] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.255038] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241254, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.325322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.344554] env[69994]: DEBUG nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 626.471978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.510192] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241252, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680913} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.511480] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 626.511480] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 626.511480] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2abc9c19-16c6-4b23-a7cf-f541ce27ab6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.519773] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 626.519773] env[69994]: value = "task-3241255" [ 626.519773] env[69994]: _type = "Task" [ 626.519773] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.529915] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241255, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.708176] env[69994]: DEBUG nova.compute.utils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 626.709821] env[69994]: DEBUG nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 626.710024] env[69994]: DEBUG nova.network.neutron [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 626.753946] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241254, 'name': Rename_Task, 'duration_secs': 0.159313} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.753946] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.753946] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2b5da7b-c4d0-47a4-bb2c-118ff047fcc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.756799] env[69994]: DEBUG nova.policy [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '889db88e7c11407fa83c4984616df70a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f33906db9fd416884267f628a3f05ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 626.764595] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 626.764595] env[69994]: value = "task-3241256" [ 626.764595] env[69994]: _type = "Task" [ 626.764595] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.776346] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241256, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.874512] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.031924] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241255, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067778} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.031924] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 627.033076] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7a7b81-191e-4dd4-b892-7cd8c400621e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.059247] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 627.059247] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35de0465-7583-4d23-b1ec-6b482a7d818e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.088578] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 627.088578] env[69994]: value = "task-3241257" [ 627.088578] env[69994]: _type = "Task" [ 627.088578] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.098510] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241257, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.102243] env[69994]: DEBUG nova.compute.manager [req-4490db4d-192b-4715-9849-9fbd82cfe471 req-8399c625-b8bd-44c3-82c3-49f405062eb3 service nova] [instance: 91666839-f440-499e-acf0-07d352e701ab] Received event network-vif-deleted-ce41f31f-0a63-4393-90a3-de51de55789e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.217032] env[69994]: DEBUG nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 627.239361] env[69994]: DEBUG nova.network.neutron [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Successfully created port: b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.252462] env[69994]: DEBUG nova.compute.manager [None req-ea52e3f3-fa95-4d37-ba06-d1616edf8f81 tempest-ServerDiagnosticsV248Test-819149820 tempest-ServerDiagnosticsV248Test-819149820-project-admin] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.258996] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f54a5f-1630-4862-9bf4-b103cf056952 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.281185] env[69994]: INFO nova.compute.manager [None req-ea52e3f3-fa95-4d37-ba06-d1616edf8f81 tempest-ServerDiagnosticsV248Test-819149820 tempest-ServerDiagnosticsV248Test-819149820-project-admin] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Retrieving diagnostics [ 627.289058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5634e4-99ed-403e-b01f-b04650fe4981 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.292617] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241256, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.602463] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241257, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.654431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3184c403-315d-4bdd-a209-e9c398ac2229 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.663481] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279d8ab8-78da-4d77-89f4-c10b07a4351e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.700298] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46afe57c-91d7-4efd-aa86-fcc32c080983 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.707150] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5655d77a-a256-41c1-b3e4-35d43d422f06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.723558] env[69994]: DEBUG nova.compute.provider_tree [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.778794] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241256, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.102781] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241257, 'name': ReconfigVM_Task, 'duration_secs': 0.564219} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.103086] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 628.103956] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbc38b6b-95a1-4ae1-8173-885560412297 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.111463] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 628.111463] env[69994]: value = "task-3241258" [ 628.111463] env[69994]: _type = "Task" [ 628.111463] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.120976] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241258, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.133762] env[69994]: DEBUG nova.network.neutron [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Successfully updated port: be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 628.230125] env[69994]: DEBUG nova.scheduler.client.report [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 628.235022] env[69994]: DEBUG nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 628.280832] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 628.281225] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.281287] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 628.281611] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.281998] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 628.281998] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 628.281998] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 628.282419] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 
tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 628.282419] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 628.282595] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 628.282889] env[69994]: DEBUG nova.virt.hardware [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 628.285034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907ac349-c2c9-4818-a6f0-4d52f7f9763d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.293251] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241256, 'name': PowerOnVM_Task} progress is 80%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.299416] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d804ba7-5187-4a15-9499-2e7cc18588e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.624021] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241258, 'name': Rename_Task, 'duration_secs': 0.205576} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.624455] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 628.625069] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-966cb776-1911-464b-b520-2ece385c50ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.634351] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 628.634351] env[69994]: value = "task-3241259" [ 628.634351] env[69994]: _type = "Task" [ 628.634351] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.638772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.638916] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquired lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.639326] env[69994]: DEBUG nova.network.neutron [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.648862] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.742016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.742016] env[69994]: DEBUG nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 628.748521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.978s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.750096] env[69994]: INFO nova.compute.claims [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.785350] env[69994]: DEBUG oslo_vmware.api [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241256, 'name': PowerOnVM_Task, 'duration_secs': 1.91283} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.785708] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 628.785967] env[69994]: INFO nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Took 13.47 seconds to spawn the instance on the hypervisor. [ 628.786198] env[69994]: DEBUG nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 628.788043] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff57e66c-2a7e-47e9-844f-9d49fa5969ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.152176] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.177141] env[69994]: DEBUG nova.network.neutron [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Successfully updated port: b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.249539] env[69994]: DEBUG nova.compute.utils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 629.251024] env[69994]: DEBUG nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 629.251672] env[69994]: DEBUG nova.network.neutron [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 629.274359] env[69994]: DEBUG nova.network.neutron [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.312840] env[69994]: DEBUG nova.policy [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41e25eb110b14afdb3e67bd4dd943e9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8b8ab56b87c46f9b960fc3b430197d3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 629.315291] env[69994]: INFO nova.compute.manager [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Took 25.79 seconds to build instance. [ 629.646644] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.680624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.684286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquired lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.684481] env[69994]: DEBUG nova.network.neutron [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.755665] env[69994]: DEBUG nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 629.807503] env[69994]: DEBUG nova.network.neutron [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Successfully created port: a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.821992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1d3aa09-feca-4557-bedf-894c2f6597a2 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.317s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.868727] env[69994]: DEBUG nova.network.neutron [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updating instance_info_cache with network_info: [{"id": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "address": "fa:16:3e:e2:6b:09", "network": {"id": "309c6f91-53cb-40c8-a685-71dd7afb097b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-898903642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3405ff9c38dd46ba98df2d9a835ed860", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe9f669d-36", "ovs_interfaceid": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.049026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36f3b3e-0add-46ce-a7ef-3a438f70ccd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.055838] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd273e6-0d5e-4fc1-b1d6-4708e428c6fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.090545] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd2929a-a5b5-4749-8256-d7374cce437d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.099280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6ce46d-30ac-47e3-adb7-190ad0746737 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.117309] env[69994]: DEBUG nova.compute.provider_tree [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.148706] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.269190] env[69994]: DEBUG nova.network.neutron [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.324318] env[69994]: DEBUG nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 630.371262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Releasing lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.373295] env[69994]: DEBUG nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Instance network_info: |[{"id": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "address": "fa:16:3e:e2:6b:09", "network": {"id": "309c6f91-53cb-40c8-a685-71dd7afb097b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-898903642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3405ff9c38dd46ba98df2d9a835ed860", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe9f669d-36", "ovs_interfaceid": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 630.374091] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:6b:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b9aabc7c-0f6c-42eb-bd27-493a1496c0c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be9f669d-36ab-4cbd-a56f-5db33a833aa8', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 630.381905] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Creating folder: Project (3405ff9c38dd46ba98df2d9a835ed860). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 630.384875] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15e99ecc-0744-4768-8496-119372ecb59a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.395121] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Created folder: Project (3405ff9c38dd46ba98df2d9a835ed860) in parent group-v647729. [ 630.395327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Creating folder: Instances. Parent ref: group-v647755. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 630.395555] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02d72e06-4c45-4a1d-9849-c1fabc300e9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.404271] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Created folder: Instances in parent group-v647755. [ 630.404514] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 630.404702] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 630.404908] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1814187-bed4-40d8-a21a-450a892cd762 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.424591] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.424591] env[69994]: value = "task-3241262" [ 630.424591] env[69994]: _type = "Task" [ 630.424591] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.433382] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241262, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.622030] env[69994]: DEBUG nova.scheduler.client.report [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 630.649818] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.693490] env[69994]: DEBUG nova.network.neutron [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updating instance_info_cache with network_info: [{"id": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "address": "fa:16:3e:2c:18:d7", "network": {"id": "ac661ad7-377f-4b70-9be6-97e7a77207b9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1514924772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f33906db9fd416884267f628a3f05ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3347b62-0c", "ovs_interfaceid": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.767132] env[69994]: DEBUG nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 630.786369] env[69994]: DEBUG nova.compute.manager [req-4d7f4fc1-5a96-4d9d-954a-1a7de1782dc1 req-567cf788-9d8c-442d-b6c7-813644cd31fa service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Received event network-vif-plugged-be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 630.786369] env[69994]: DEBUG oslo_concurrency.lockutils [req-4d7f4fc1-5a96-4d9d-954a-1a7de1782dc1 req-567cf788-9d8c-442d-b6c7-813644cd31fa service nova] Acquiring lock "53a8714c-50f7-4990-a3d9-86f8fc908d03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.786369] env[69994]: DEBUG oslo_concurrency.lockutils [req-4d7f4fc1-5a96-4d9d-954a-1a7de1782dc1 req-567cf788-9d8c-442d-b6c7-813644cd31fa service nova] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.786369] env[69994]: DEBUG oslo_concurrency.lockutils [req-4d7f4fc1-5a96-4d9d-954a-1a7de1782dc1 req-567cf788-9d8c-442d-b6c7-813644cd31fa service nova] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.786369] env[69994]: DEBUG nova.compute.manager [req-4d7f4fc1-5a96-4d9d-954a-1a7de1782dc1 req-567cf788-9d8c-442d-b6c7-813644cd31fa service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] No waiting events found dispatching network-vif-plugged-be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.786614] env[69994]: WARNING nova.compute.manager [req-4d7f4fc1-5a96-4d9d-954a-1a7de1782dc1 req-567cf788-9d8c-442d-b6c7-813644cd31fa service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Received unexpected event network-vif-plugged-be9f669d-36ab-4cbd-a56f-5db33a833aa8 for instance with vm_state building and task_state spawning. 
[ 630.806612] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 630.807255] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 630.807255] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 630.807255] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 630.807368] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 630.807496] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 630.807705] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 630.807920] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 630.808120] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 
tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 630.808289] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 630.808479] env[69994]: DEBUG nova.virt.hardware [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 630.809787] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca393776-d933-45e4-9b96-e64606dac609 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.818899] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccae55df-72af-40f2-b2a7-45b4560f1743 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.867033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.940562] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241262, 'name': CreateVM_Task, 'duration_secs': 0.3931} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.941662] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 630.941776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.941989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.942656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 630.943576] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-731f7be6-7700-4867-96a1-7b6f8129ef51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.950580] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 630.950580] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521f82e2-c146-53a2-9cac-c9e0221863fc" [ 630.950580] env[69994]: _type = "Task" [ 630.950580] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.960266] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f82e2-c146-53a2-9cac-c9e0221863fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.972425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "45a8dced-6c49-441c-92e2-ee323ed8753c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.972425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.127351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.127964] env[69994]: DEBUG nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 631.132350] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.977s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.136132] env[69994]: DEBUG nova.objects.instance [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Lazy-loading 'resources' on Instance uuid 2e374549-00a2-4014-90e0-ceccbe4360fa {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 631.153383] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.197507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Releasing lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.199223] env[69994]: DEBUG nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Instance network_info: |[{"id": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "address": "fa:16:3e:2c:18:d7", "network": {"id": "ac661ad7-377f-4b70-9be6-97e7a77207b9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1514924772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f33906db9fd416884267f628a3f05ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3347b62-0c", "ovs_interfaceid": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.199710] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:18:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3347b62-0c9c-4b6c-8d07-587f2423850c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.208590] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Creating folder: Project (7f33906db9fd416884267f628a3f05ed). Parent ref: group-v647729. 
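The "Instance network_info" and "Instance VIF info" records above show the driver reducing a full Neutron port description to the small dict the vmwareapi layer needs before CreateVM_Task. The sketch below mirrors that mapping using only fields visible in the log; it is illustrative, not the actual nova.virt.vmwareapi code.

```python
def vif_to_vmware_info(vif):
    """Translate one Neutron VIF dict (as logged) into the flat VMware VIF info."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],             # e.g. 'br-int'
        "mac_address": vif["address"],                        # e.g. 'fa:16:3e:2c:18:d7'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],   # NSX logical switch UUID
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                                # Neutron port UUID
        "vif_model": "vmxnet3",
    }
```

For the port logged above this yields network_name 'br-int', network-id 'ee20e439-fed9-490e-97dd-f3c886977ae1' and iface_id 'b3347b62-0c9c-4b6c-8d07-587f2423850c', matching the VIF info record.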
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.210738] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd99694c-8349-4787-820e-06d97b59291e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.220453] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Created folder: Project (7f33906db9fd416884267f628a3f05ed) in parent group-v647729. [ 631.220655] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Creating folder: Instances. Parent ref: group-v647758. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.221250] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a25bce64-40e5-45d1-8ccf-e746f07bfb9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.233257] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Created folder: Instances in parent group-v647758. [ 631.233523] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.234110] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.234358] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec5a5b16-f6e0-4266-bc5c-01f52f2f0076 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.258175] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.258175] env[69994]: value = "task-3241265" [ 631.258175] env[69994]: _type = "Task" [ 631.258175] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.266243] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241265, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.407201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "ad957c30-c923-4bbf-8841-00e99de44781" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.407488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "ad957c30-c923-4bbf-8841-00e99de44781" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.417240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "f3945280-ee10-426b-bcab-3e52e8779c55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.417547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "f3945280-ee10-426b-bcab-3e52e8779c55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.417795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "f3945280-ee10-426b-bcab-3e52e8779c55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.417979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "f3945280-ee10-426b-bcab-3e52e8779c55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.418199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "f3945280-ee10-426b-bcab-3e52e8779c55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.422022] env[69994]: INFO nova.compute.manager [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 
tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Terminating instance [ 631.468580] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f82e2-c146-53a2-9cac-c9e0221863fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011295} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.468875] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.469131] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 631.470522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.470522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.470522] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 631.470522] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f3f8f33-ed96-4464-a88f-cbdc32a16937 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.480978] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 631.480978] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 631.481790] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e693d0d-14b0-4349-95d2-31f02f4030c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.487879] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 631.487879] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d0d76b-a3f5-6f36-f733-12774eb22cb1" [ 631.487879] env[69994]: _type = "Task" [ 631.487879] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.500359] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d0d76b-a3f5-6f36-f733-12774eb22cb1, 'name': SearchDatastore_Task, 'duration_secs': 0.010161} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.501954] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-128fd4fa-fd68-4135-988e-236a27c18a60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.508465] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 631.508465] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ce59c3-d46a-fbe3-4a39-bd49f4886ee2" [ 631.508465] env[69994]: _type = "Task" [ 631.508465] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.517719] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ce59c3-d46a-fbe3-4a39-bd49f4886ee2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.639915] env[69994]: DEBUG nova.compute.utils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 631.646125] env[69994]: DEBUG nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Not allocating networking since 'none' was specified. 
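This stretch of the log (lock on the cached vmdk path, MakeDirectory, repeated SearchDatastore_Task calls) is the image-cache preparation step. A condensed sketch of that flow is below, assuming hypothetical helper methods on `session`; the real code lives in nova.virt.vmwareapi.vmops/ds_util and differs in detail.

```python
from oslo_concurrency import lockutils

def ensure_cached_image(session, cache_dir, cached_vmdk):
    """Make sure the cache dir exists and the base vmdk is present before copying."""
    with lockutils.lock(cached_vmdk, external=True):
        try:
            session.make_directory(cache_dir)        # the MakeDirectory call logged above
        except FileExistsError:                       # stand-in for the vSphere "already exists" fault
            pass                                      # another build created it first; that is fine
        if not session.file_exists(cache_dir, cached_vmdk):   # SearchDatastore_Task
            session.fetch_image_to_cache(cached_vmdk)          # hypothetical download step
```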
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 631.654225] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.656884] env[69994]: DEBUG nova.compute.manager [req-21584eab-c6a2-46c3-bdb0-0b25710791d1 req-65ec683d-fc9d-4fce-a307-2b327d0b3914 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Received event network-vif-plugged-b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 631.657162] env[69994]: DEBUG oslo_concurrency.lockutils [req-21584eab-c6a2-46c3-bdb0-0b25710791d1 req-65ec683d-fc9d-4fce-a307-2b327d0b3914 service nova] Acquiring lock "2f710439-0216-401e-9759-af584f9bd00d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.657366] env[69994]: DEBUG oslo_concurrency.lockutils [req-21584eab-c6a2-46c3-bdb0-0b25710791d1 req-65ec683d-fc9d-4fce-a307-2b327d0b3914 service nova] Lock "2f710439-0216-401e-9759-af584f9bd00d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.658034] env[69994]: DEBUG oslo_concurrency.lockutils [req-21584eab-c6a2-46c3-bdb0-0b25710791d1 req-65ec683d-fc9d-4fce-a307-2b327d0b3914 service nova] Lock "2f710439-0216-401e-9759-af584f9bd00d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.658034] env[69994]: DEBUG nova.compute.manager [req-21584eab-c6a2-46c3-bdb0-0b25710791d1 req-65ec683d-fc9d-4fce-a307-2b327d0b3914 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] No waiting events found dispatching network-vif-plugged-b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 631.658034] env[69994]: WARNING nova.compute.manager [req-21584eab-c6a2-46c3-bdb0-0b25710791d1 req-65ec683d-fc9d-4fce-a307-2b327d0b3914 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Received unexpected event network-vif-plugged-b3347b62-0c9c-4b6c-8d07-587f2423850c for instance with vm_state building and task_state spawning. [ 631.729828] env[69994]: DEBUG nova.network.neutron [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Successfully updated port: a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 631.772033] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241265, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.927293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "refresh_cache-f3945280-ee10-426b-bcab-3e52e8779c55" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.927483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquired lock "refresh_cache-f3945280-ee10-426b-bcab-3e52e8779c55" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.927662] env[69994]: DEBUG nova.network.neutron [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.018040] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ce59c3-d46a-fbe3-4a39-bd49f4886ee2, 'name': SearchDatastore_Task, 'duration_secs': 0.010723} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.021180] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.021469] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 53a8714c-50f7-4990-a3d9-86f8fc908d03/53a8714c-50f7-4990-a3d9-86f8fc908d03.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 632.022460] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e84c81ee-2ad6-4e39-af3e-bcbb9251bcbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.033704] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 632.033704] env[69994]: value = "task-3241266" [ 632.033704] env[69994]: _type = "Task" [ 632.033704] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.042652] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed3b1c5-3d65-45d2-8efa-ad1fbe18e3ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.052989] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.056193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57da0958-b371-4095-a561-f684d379a780 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.097473] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdef4f4-990f-4eca-b49a-6811ca8a5801 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.106650] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d7eff3-7501-42b2-9075-f7e7becfc3c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.123613] env[69994]: DEBUG nova.compute.provider_tree [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.152523] env[69994]: DEBUG nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 632.156353] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.234872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.235087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.235246] env[69994]: DEBUG nova.network.neutron [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.272268] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241265, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.450272] env[69994]: DEBUG nova.network.neutron [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.492047] env[69994]: DEBUG nova.compute.manager [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 632.545394] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241266, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.627634] env[69994]: DEBUG nova.scheduler.client.report [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.652946] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.727267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.727267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.727267] env[69994]: DEBUG nova.network.neutron [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.772591] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241265, 'name': CreateVM_Task, 'duration_secs': 1.424307} completed successfully. 
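The inventory record above is what Placement uses to size the provider. Schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the snippet below just replays that arithmetic with the values copied from the log.

```python
# Values copied from the logged inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 120},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: ~{capacity:.0f} schedulable, max {inv['max_unit']} per allocation")
# VCPU: ~192, MEMORY_MB: ~196078, DISK_GB: ~400
```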
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.772879] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.774125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.774125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.774125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 632.774292] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ce54430-e901-40b8-8a82-bedb22a9953a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.779852] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 632.779852] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527aa103-530d-9f59-883c-0e7117b5ac47" [ 632.779852] env[69994]: _type = "Task" [ 632.779852] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.792247] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527aa103-530d-9f59-883c-0e7117b5ac47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.795399] env[69994]: DEBUG nova.network.neutron [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.948048] env[69994]: DEBUG nova.network.neutron [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updating instance_info_cache with network_info: [{"id": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "address": "fa:16:3e:45:f4:b2", "network": {"id": "e6b2a322-ac01-400c-a8ec-68a371b2061c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-141203958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8b8ab56b87c46f9b960fc3b430197d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa16b9fc2-06", "ovs_interfaceid": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.025528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.048649] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241266, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.894222} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.050285] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 53a8714c-50f7-4990-a3d9-86f8fc908d03/53a8714c-50f7-4990-a3d9-86f8fc908d03.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 633.050285] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 633.051225] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f52b452-ed9c-4a8a-99a4-bb1051c8d05a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.060684] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 633.060684] env[69994]: value = "task-3241267" [ 633.060684] env[69994]: _type = "Task" [ 633.060684] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.071026] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241267, 'name': ExtendVirtualDisk_Task} progress is 0%. 
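The CopyVirtualDisk_Task / ExtendVirtualDisk_Task pair above copies the cached base vmdk into the instance directory and then grows the root disk to the flavor size. The logged target of 1048576 appears to be in KB, i.e. 1 GiB for a root_gb=1 flavor; the sketch below shows that size calculation with hypothetical helper names, not nova's own.

```python
KIB_PER_GIB = 1024 * 1024   # 1048576

def extend_root_disk(session, vmdk_path, root_gb):
    """Grow the copied root vmdk to the flavor's root size (size expressed in KB)."""
    target_kb = root_gb * KIB_PER_GIB            # 1 -> 1048576, matching the logged value
    session.extend_virtual_disk(vmdk_path, target_kb)   # hypothetical wrapper around ExtendVirtualDisk_Task
```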
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.134848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.137914] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.066s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.140077] env[69994]: INFO nova.compute.claims [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.155833] env[69994]: DEBUG oslo_vmware.api [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241259, 'name': PowerOnVM_Task, 'duration_secs': 4.122582} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.156113] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.156318] env[69994]: INFO nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Took 9.88 seconds to spawn the instance on the hypervisor. [ 633.157044] env[69994]: DEBUG nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.157369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31662e4-7b1f-4cf7-b1e4-cd50741ed00c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.165655] env[69994]: DEBUG nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 633.179062] env[69994]: INFO nova.scheduler.client.report [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Deleted allocations for instance 2e374549-00a2-4014-90e0-ceccbe4360fa [ 633.209890] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 633.210176] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.210341] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 633.210522] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.210664] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 633.210810] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 633.212372] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 633.212372] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 
tempest-ServersAdmin275Test-841048262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 633.212372] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 633.212580] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 633.212774] env[69994]: DEBUG nova.virt.hardware [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 633.213978] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4eee83d-5905-4bcc-9dd4-4a6da9be3172 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.226407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135aead2-c658-4ded-aaa3-4e31471b344f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.229807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Releasing lock "refresh_cache-f3945280-ee10-426b-bcab-3e52e8779c55" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.230219] env[69994]: DEBUG nova.compute.manager [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Start destroying the instance on the hypervisor. 
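The hardware records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") come from enumerating (sockets, cores, threads) combinations for the flavor's vCPU count under the per-dimension limits. The sketch below reproduces that enumeration in a simplified form; it is not nova's implementation, but with vcpus=1 and maxima of 65536 it returns the single 1:1:1 topology seen in the log.

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue                                # sockets*cores must divide vcpus evenly
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- one possible topology, as logged
```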
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 633.230896] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.231854] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8af01c-fec5-4e86-9632-ac2925cd3454 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.244944] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 633.251251] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Creating folder: Project (903720648a8d4a47a6d294124d2ed611). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.252850] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-975f44ee-0d4c-4748-a014-fd079d498d76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.258051] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 633.258051] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1060286-482a-4722-abec-b3dd627f0a83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.265224] env[69994]: DEBUG oslo_vmware.api [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 633.265224] env[69994]: value = "task-3241269" [ 633.265224] env[69994]: _type = "Task" [ 633.265224] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.266985] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Created folder: Project (903720648a8d4a47a6d294124d2ed611) in parent group-v647729. [ 633.267229] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Creating folder: Instances. Parent ref: group-v647761. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.272328] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6de4bf99-64e1-435d-b9e3-acdb1013cd92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.278492] env[69994]: DEBUG oslo_vmware.api [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241269, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.281076] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Created folder: Instances in parent group-v647761. [ 633.281353] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.284495] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 633.285089] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f75cf7b3-6dea-4a9d-9039-361bcd7d4e7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.305361] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527aa103-530d-9f59-883c-0e7117b5ac47, 'name': SearchDatastore_Task, 'duration_secs': 0.057682} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.306774] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.307513] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 633.307513] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.307642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.307831] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 633.308138] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 633.308138] env[69994]: value = "task-3241271" [ 633.308138] env[69994]: _type = "Task" [ 633.308138] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.308386] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff619952-8fef-40d1-9f55-e844ccb25735 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.320161] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241271, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.349827] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 633.349967] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 633.350749] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4992668-36de-4592-bcc6-4b49f2d25f4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.356629] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 633.356629] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522b59c5-b0a9-10ff-b906-015e325eb68d" [ 633.356629] env[69994]: _type = "Task" [ 633.356629] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.365499] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b59c5-b0a9-10ff-b906-015e325eb68d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.451156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Releasing lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.451534] env[69994]: DEBUG nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Instance network_info: |[{"id": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "address": "fa:16:3e:45:f4:b2", "network": {"id": "e6b2a322-ac01-400c-a8ec-68a371b2061c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-141203958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8b8ab56b87c46f9b960fc3b430197d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa16b9fc2-06", "ovs_interfaceid": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 633.451976] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:f4:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a16b9fc2-06de-47cb-b39f-b77130ed0eec', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 633.463615] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Creating folder: Project (e8b8ab56b87c46f9b960fc3b430197d3). Parent ref: group-v647729. 
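[editor's note] The entry above shows the Neutron network_info blob being reduced to the compact "Instance VIF info" list the VMware driver builds the VM from. The sketch below mirrors that field mapping as seen in the log (bridge, MAC, nsx-logical-switch-id, port id, vmxnet3); the helper function itself is hypothetical, not the driver's real code.

    def neutron_vif_to_vmware_vif_info(vif):
        """Map one network_info entry to the VIF-info dict shown in the log (sketch)."""
        details = vif["details"]
        return {
            "network_name": vif["network"]["bridge"],          # e.g. 'br-int'
            "mac_address": vif["address"],                     # e.g. 'fa:16:3e:45:f4:b2'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        }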
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.463741] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfb30ff8-869f-417a-af31-9445470aa075 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.474598] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Created folder: Project (e8b8ab56b87c46f9b960fc3b430197d3) in parent group-v647729. [ 633.474598] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Creating folder: Instances. Parent ref: group-v647764. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.474980] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0eacce22-9278-49a9-ab4d-c6299d50e628 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.484921] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Created folder: Instances in parent group-v647764. [ 633.485225] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.485491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 633.485619] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a640e876-4d9b-4e8a-8b76-4f1b2263ffa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.505661] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 633.505661] env[69994]: value = "task-3241274" [ 633.505661] env[69994]: _type = "Task" [ 633.505661] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.513537] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241274, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.571048] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170761} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.571305] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 633.572155] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1802082c-17c6-4907-8c8b-a2816d162435 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.606340] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 53a8714c-50f7-4990-a3d9-86f8fc908d03/53a8714c-50f7-4990-a3d9-86f8fc908d03.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 633.606469] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3952da26-f0ab-42b9-a2cc-c74747dc7d83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.636063] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 633.636063] env[69994]: value = "task-3241275" [ 633.636063] env[69994]: _type = "Task" [ 633.636063] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.649993] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241275, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.695444] env[69994]: INFO nova.compute.manager [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Took 27.44 seconds to build instance. [ 633.706537] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97e070b9-9cda-4ffc-acc2-5c09551d9cad tempest-DeleteServersAdminTestJSON-1685494246 tempest-DeleteServersAdminTestJSON-1685494246-project-admin] Lock "2e374549-00a2-4014-90e0-ceccbe4360fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.363s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.779745] env[69994]: DEBUG oslo_vmware.api [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241269, 'name': PowerOffVM_Task, 'duration_secs': 0.322692} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.779745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.780084] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.780084] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ff2aaa5-e9fe-4ca4-b239-41170a17af62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.806930] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.807235] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.807382] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Deleting the datastore file [datastore2] f3945280-ee10-426b-bcab-3e52e8779c55 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.807652] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c142123-4c8a-49a7-a8f2-39434fecb646 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.815592] env[69994]: DEBUG oslo_vmware.api [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for the task: (returnval){ [ 633.815592] env[69994]: value = "task-3241277" [ 633.815592] env[69994]: _type = "Task" [ 633.815592] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.824304] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241271, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.829333] env[69994]: DEBUG oslo_vmware.api [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241277, 'name': DeleteDatastoreFile_Task} progress is 0%. 
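[editor's note] The destroy path above follows a fixed order: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance directory. The sketch below shows that order using oslo.vmware's public session helpers (invoke_api, wait_for_task); the surrounding function, its parameters, and argument plumbing are illustrative assumptions, not Nova's _destroy_instance.

    def destroy_vm(session, vim, vm_ref, file_manager, datacenter, ds_path):
        """Power off, unregister, then delete the instance files (sketch only)."""
        power_off = session.invoke_api(vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(power_off)
        session.invoke_api(vim, "UnregisterVM", vm_ref)      # not a task, returns immediately
        delete = session.invoke_api(vim, "DeleteDatastoreFile_Task", file_manager,
                                    name=ds_path, datacenter=datacenter)
        session.wait_for_task(delete)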
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.868869] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b59c5-b0a9-10ff-b906-015e325eb68d, 'name': SearchDatastore_Task, 'duration_secs': 0.028126} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.869845] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e2ea0c9-ce82-497e-b3f3-23b653768f98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.876046] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 633.876046] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528d72ed-53f5-e4cb-43c7-3c9a7dc10897" [ 633.876046] env[69994]: _type = "Task" [ 633.876046] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.885241] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528d72ed-53f5-e4cb-43c7-3c9a7dc10897, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.895240] env[69994]: DEBUG nova.compute.manager [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Received event network-changed-be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 633.895240] env[69994]: DEBUG nova.compute.manager [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Refreshing instance network info cache due to event network-changed-be9f669d-36ab-4cbd-a56f-5db33a833aa8. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 633.895464] env[69994]: DEBUG oslo_concurrency.lockutils [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] Acquiring lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.895605] env[69994]: DEBUG oslo_concurrency.lockutils [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] Acquired lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.895761] env[69994]: DEBUG nova.network.neutron [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Refreshing network info cache for port be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.016768] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241274, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.146147] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.196500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.198504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.198504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.198504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.198504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.200656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-632fade7-5bd3-45c1-a411-8f6c83af5807 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "84efe900-1d79-42f9-b3c6-54299757cdbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.957s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.200785] env[69994]: INFO nova.compute.manager [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Terminating instance [ 634.307580] env[69994]: DEBUG nova.compute.manager [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Received event network-changed-b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.307580] env[69994]: DEBUG nova.compute.manager [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Refreshing instance network info cache due to event network-changed-b3347b62-0c9c-4b6c-8d07-587f2423850c. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 634.307580] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Acquiring lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.307580] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Acquired lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.307580] env[69994]: DEBUG nova.network.neutron [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Refreshing network info cache for port b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.326179] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241271, 'name': CreateVM_Task, 'duration_secs': 0.651681} completed successfully. 
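[editor's note] The "Received event network-changed-<port>" entries above show the event-driven cache refresh: a Neutron notification arrives, the per-instance "refresh_cache-<uuid>" lock is taken, and the instance_info_cache is rebuilt from Neutron. A minimal sketch of that pattern follows; lockutils.lock is the real oslo.concurrency context manager, while `refresh_instance_network_info` is a hypothetical stand-in for the Neutron re-query.

    from oslo_concurrency import lockutils

    def handle_network_changed(instance_uuid, port_id, refresh_instance_network_info):
        """Serialise cache refreshes per instance, as the log's lock names suggest (sketch)."""
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            refresh_instance_network_info(instance_uuid, port_id)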
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.327047] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 634.327475] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.327631] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.327948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 634.328467] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de1e93e4-63ec-4fea-a76b-b5084052c4f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.333551] env[69994]: DEBUG oslo_vmware.api [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Task: {'id': task-3241277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170046} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.334136] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.334340] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 634.334525] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.334691] env[69994]: INFO nova.compute.manager [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Took 1.10 seconds to destroy the instance on the hypervisor. [ 634.334975] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.335178] env[69994]: DEBUG nova.compute.manager [-] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 634.335427] env[69994]: DEBUG nova.network.neutron [-] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.338191] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 634.338191] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ba07d8-6e2e-0f57-ab5d-e0cd599b5d27" [ 634.338191] env[69994]: _type = "Task" [ 634.338191] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.349583] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ba07d8-6e2e-0f57-ab5d-e0cd599b5d27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.353295] env[69994]: DEBUG nova.network.neutron [-] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.389610] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528d72ed-53f5-e4cb-43c7-3c9a7dc10897, 'name': SearchDatastore_Task, 'duration_secs': 0.013308} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.389996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.390288] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 2f710439-0216-401e-9759-af584f9bd00d/2f710439-0216-401e-9759-af584f9bd00d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 634.390543] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-471691ee-7c17-45cc-af7b-d9cb3638d198 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.397746] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 634.397746] env[69994]: value = "task-3241278" [ 634.397746] env[69994]: _type = "Task" [ 634.397746] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.409586] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.524431] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241274, 'name': CreateVM_Task, 'duration_secs': 0.511696} completed successfully. 
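[editor's note] The CopyVirtualDisk_Task above copies the cached image vmdk into the instance directory; the following entries then extend the root disk to the flavor size. The sketch below illustrates that copy-then-extend step and the per-vmdk lock visible in the log; `copy_disk` and `extend_disk` are hypothetical wrappers over CopyVirtualDisk_Task / ExtendVirtualDisk_Task, and the datastore paths follow the layout shown above.

    from oslo_concurrency import lockutils

    def clone_root_disk(copy_disk, extend_disk, image_id, instance_uuid, size_kb):
        """Copy the cached vmdk to the instance dir, then grow it (sketch only)."""
        cache_vmdk = ("[datastore2] devstack-image-cache_base/"
                      "%(img)s/%(img)s.vmdk" % {"img": image_id})
        instance_vmdk = "[datastore2] %(uuid)s/%(uuid)s.vmdk" % {"uuid": instance_uuid}
        # The per-vmdk lock serialises concurrent builds using the same cached image.
        with lockutils.lock(cache_vmdk):
            copy_disk(cache_vmdk, instance_vmdk)
        extend_disk(instance_vmdk, size_kb)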
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.524606] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 634.525434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.534022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0047e12a-fd90-42d9-8e1b-de3c460ac743 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.540821] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013bd6bb-feb8-4934-9867-3ea99fc7d4bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.574355] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afc97b8-1769-4df2-996a-30efa6493444 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.581643] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f7d10c-1287-4a36-bde6-302c75e23147 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.597662] env[69994]: DEBUG nova.compute.provider_tree [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.648836] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241275, 'name': ReconfigVM_Task, 'duration_secs': 0.576683} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.649138] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 53a8714c-50f7-4990-a3d9-86f8fc908d03/53a8714c-50f7-4990-a3d9-86f8fc908d03.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 634.649760] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04bb5d0d-d7cc-4fce-947f-e90e240f2b72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.657562] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 634.657562] env[69994]: value = "task-3241279" [ 634.657562] env[69994]: _type = "Task" [ 634.657562] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.672198] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241279, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.710261] env[69994]: DEBUG nova.compute.manager [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 634.710345] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 634.711552] env[69994]: DEBUG nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.714890] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0a2ca6-8967-493c-a583-5845762213cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.722925] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 634.723071] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-652e608e-0b6a-4c5c-9140-a4c85b5e6a42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.734601] env[69994]: DEBUG oslo_vmware.api [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 634.734601] env[69994]: value = "task-3241280" [ 634.734601] env[69994]: _type = "Task" [ 634.734601] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.742476] env[69994]: DEBUG nova.network.neutron [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updated VIF entry in instance network info cache for port be9f669d-36ab-4cbd-a56f-5db33a833aa8. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 634.742476] env[69994]: DEBUG nova.network.neutron [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updating instance_info_cache with network_info: [{"id": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "address": "fa:16:3e:e2:6b:09", "network": {"id": "309c6f91-53cb-40c8-a685-71dd7afb097b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-898903642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3405ff9c38dd46ba98df2d9a835ed860", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe9f669d-36", "ovs_interfaceid": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.749019] env[69994]: DEBUG oslo_vmware.api [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 
tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241280, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.850268] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ba07d8-6e2e-0f57-ab5d-e0cd599b5d27, 'name': SearchDatastore_Task, 'duration_secs': 0.02696} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.850739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.850995] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 634.851275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.851423] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.851617] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 634.852089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.852249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 634.852503] env[69994]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95082b51-03a2-4a62-b8a5-db672c3b28fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.854338] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94933857-c425-4e25-89e5-bbe74670f9a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.858915] env[69994]: DEBUG nova.network.neutron [-] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.863359] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 634.863359] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52de7e09-6168-0434-b87f-88432a9626aa" [ 634.863359] env[69994]: _type = "Task" [ 634.863359] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.869636] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 634.869828] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 634.874418] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32ad04e6-d164-49f1-a171-e95e4cd26296 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.883891] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 634.883891] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fdb4b0-7d9f-61c8-60a2-c115ebe3c818" [ 634.883891] env[69994]: _type = "Task" [ 634.883891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.885515] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52de7e09-6168-0434-b87f-88432a9626aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010646} completed successfully. 
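[editor's note] The "Processing image ... Acquiring lock [datastore2] devstack-image-cache_base/..." and SearchDatastore_Task entries around here are the image-cache check: under the per-image lock, the driver searches the cache directory and only downloads the image on a miss. A sketch of that check-before-fetch pattern follows; `vmdk_exists` and `download_image` are hypothetical helpers, not the real _fetch_image_if_missing internals.

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(vmdk_exists, download_image, image_id):
        """Only fetch the image if the cached vmdk is absent (sketch only)."""
        cache_dir = "[datastore2] devstack-image-cache_base/%s" % image_id
        with lockutils.lock(cache_dir):
            if not vmdk_exists(cache_dir, "%s.vmdk" % image_id):
                download_image(image_id, cache_dir)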
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.888919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.889198] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 634.889403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.897250] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fdb4b0-7d9f-61c8-60a2-c115ebe3c818, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.907305] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241278, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.099761] env[69994]: DEBUG nova.scheduler.client.report [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 635.146702] env[69994]: DEBUG nova.network.neutron [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updated VIF entry in instance network info cache for port b3347b62-0c9c-4b6c-8d07-587f2423850c. 
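[editor's note] The inventory record above can be read with the usual Placement capacity formula, (total - reserved) * allocation_ratio. A small worked example, assuming that formula applies to each resource class as listed:

    def schedulable_capacity(total, reserved, allocation_ratio):
        return (total - reserved) * allocation_ratio

    print(schedulable_capacity(48, 0, 4.0))        # VCPU      -> 192.0
    print(schedulable_capacity(196590, 512, 1.0))  # MEMORY_MB -> 196078.0
    print(schedulable_capacity(400, 0, 1.0))       # DISK_GB   -> 400.0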
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 635.147113] env[69994]: DEBUG nova.network.neutron [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updating instance_info_cache with network_info: [{"id": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "address": "fa:16:3e:2c:18:d7", "network": {"id": "ac661ad7-377f-4b70-9be6-97e7a77207b9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1514924772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f33906db9fd416884267f628a3f05ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3347b62-0c", "ovs_interfaceid": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.169191] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241279, 'name': Rename_Task, 'duration_secs': 0.198977} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.169630] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 635.170669] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7aa30d35-2341-476f-baab-f26c5609c221 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.179774] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 635.179774] env[69994]: value = "task-3241281" [ 635.179774] env[69994]: _type = "Task" [ 635.179774] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.190431] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241281, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.243860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.244306] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.245634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.246132] env[69994]: DEBUG oslo_concurrency.lockutils [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] Releasing lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.246332] env[69994]: DEBUG nova.compute.manager [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Received event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 635.246499] env[69994]: DEBUG nova.compute.manager [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing instance network info cache due to event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1. 
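[editor's note] The tail of the spawn flow is visible across the preceding entries: the copied vmdk is attached with a ReconfigVM_Task, the VM is renamed with Rename_Task, and it is started with PowerOnVM_Task. The sketch below only fixes that ordering; `attach_disk`, `rename_vm`, and `power_on` are hypothetical helpers wrapping those vCenter tasks.

    def finish_spawn(attach_disk, rename_vm, power_on, vm_ref, vmdk_path, instance_uuid):
        """Attach the root disk, rename the VM, then power it on (sketch only)."""
        attach_disk(vm_ref, vmdk_path, disk_type="sparse")   # ReconfigVM_Task
        rename_vm(vm_ref, instance_uuid)                      # Rename_Task
        power_on(vm_ref)                                      # PowerOnVM_Task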
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 635.246690] env[69994]: DEBUG oslo_concurrency.lockutils [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] Acquiring lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.246858] env[69994]: DEBUG oslo_concurrency.lockutils [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] Acquired lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.247059] env[69994]: DEBUG nova.network.neutron [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 635.255146] env[69994]: DEBUG oslo_vmware.api [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241280, 'name': PowerOffVM_Task, 'duration_secs': 0.205651} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.255653] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 635.256116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 635.256116] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f4317c3-3087-42dd-ac4d-30e5732c6c04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.336419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 635.336647] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 635.336863] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Deleting the datastore file 
[datastore1] 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.337134] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f490235-2f53-4fcf-866e-287ec10af96e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.346307] env[69994]: DEBUG oslo_vmware.api [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for the task: (returnval){ [ 635.346307] env[69994]: value = "task-3241283" [ 635.346307] env[69994]: _type = "Task" [ 635.346307] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.355227] env[69994]: DEBUG oslo_vmware.api [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.371824] env[69994]: INFO nova.compute.manager [-] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Took 1.04 seconds to deallocate network for instance. [ 635.395806] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fdb4b0-7d9f-61c8-60a2-c115ebe3c818, 'name': SearchDatastore_Task, 'duration_secs': 0.018808} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.396976] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7393c450-1a2f-4453-b91c-1aeb6074e705 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.408927] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 635.408927] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dd624c-3ae5-c23c-4b44-7ddadf0569d8" [ 635.408927] env[69994]: _type = "Task" [ 635.408927] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.413547] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241278, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.925466} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.417261] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 2f710439-0216-401e-9759-af584f9bd00d/2f710439-0216-401e-9759-af584f9bd00d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.418439] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.418439] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3eb308c9-3e89-4b3f-b6c0-e9b1974a8144 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.427468] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dd624c-3ae5-c23c-4b44-7ddadf0569d8, 'name': SearchDatastore_Task, 'duration_secs': 0.012309} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.430194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.430726] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 635.431726] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 635.431726] env[69994]: value = "task-3241284" [ 635.431726] env[69994]: _type = "Task" [ 635.431726] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.431880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.432143] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 635.432788] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99de26ee-2d25-4115-b785-bf29985e68ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.436401] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d2f95fd-a53c-40c0-a133-3fba8b6eb4d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.449411] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.452809] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 635.452809] env[69994]: value = "task-3241285" [ 635.452809] env[69994]: _type = "Task" [ 635.452809] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.452809] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 635.453043] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 635.453775] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30a1bd7e-2094-48d7-8723-6a7cc6d3c54f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.462874] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 635.462874] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52539365-cd34-6e4a-5ab3-7e0afa5b3214" [ 635.462874] env[69994]: _type = "Task" [ 635.462874] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.466230] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.475436] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52539365-cd34-6e4a-5ab3-7e0afa5b3214, 'name': SearchDatastore_Task, 'duration_secs': 0.009656} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.476342] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be34b4b6-b11b-469a-b885-cfdd6c1b46c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.481751] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 635.481751] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528e2ee5-5784-73a4-3d94-acc5faa74cef" [ 635.481751] env[69994]: _type = "Task" [ 635.481751] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.490641] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528e2ee5-5784-73a4-3d94-acc5faa74cef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.610224] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.610889] env[69994]: DEBUG nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.613959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.384s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.615526] env[69994]: INFO nova.compute.claims [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.650493] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Releasing lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.650981] env[69994]: DEBUG nova.compute.manager [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Received event network-vif-plugged-a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 635.651675] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Acquiring lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.651675] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.651675] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.651675] env[69994]: DEBUG nova.compute.manager [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] No waiting events found dispatching network-vif-plugged-a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 635.651859] env[69994]: WARNING nova.compute.manager [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Received unexpected event network-vif-plugged-a16b9fc2-06de-47cb-b39f-b77130ed0eec for instance with vm_state building and task_state spawning. [ 635.651982] env[69994]: DEBUG nova.compute.manager [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Received event network-changed-a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 635.652281] env[69994]: DEBUG nova.compute.manager [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Refreshing instance network info cache due to event network-changed-a16b9fc2-06de-47cb-b39f-b77130ed0eec. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 635.652525] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Acquiring lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.652811] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Acquired lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.652878] env[69994]: DEBUG nova.network.neutron [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Refreshing network info cache for port a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 635.692131] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241281, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.858160] env[69994]: DEBUG oslo_vmware.api [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Task: {'id': task-3241283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253751} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.858160] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.858462] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 635.858500] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.858644] env[69994]: INFO nova.compute.manager [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 635.858885] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 635.859088] env[69994]: DEBUG nova.compute.manager [-] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 635.859202] env[69994]: DEBUG nova.network.neutron [-] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.879369] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.897459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.897815] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.950613] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073233} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.950951] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 635.951818] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631778c9-0585-4a16-8112-fd2f48f697d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.962955] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490583} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.975068] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.975387] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.985918] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 2f710439-0216-401e-9759-af584f9bd00d/2f710439-0216-401e-9759-af584f9bd00d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 635.988910] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e63657d3-fc41-4823-9c4a-d90cd5cf071e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.991100] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17bf82a6-25dc-4115-9d7b-d7d537f144a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.019655] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528e2ee5-5784-73a4-3d94-acc5faa74cef, 'name': SearchDatastore_Task, 'duration_secs': 0.009829} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.020678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.020923] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b003b7c2-e754-440e-8a65-13c5e9c68cd5/b003b7c2-e754-440e-8a65-13c5e9c68cd5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 636.021275] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 636.021275] env[69994]: value = "task-3241287" [ 636.021275] env[69994]: _type = "Task" [ 636.021275] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.021490] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 636.021490] env[69994]: value = "task-3241286" [ 636.021490] env[69994]: _type = "Task" [ 636.021490] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.022098] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4284c81-8921-4154-a5d8-cff4311473f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.035318] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241287, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.039546] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241286, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.039872] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 636.039872] env[69994]: value = "task-3241288" [ 636.039872] env[69994]: _type = "Task" [ 636.039872] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.047949] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241288, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.091368] env[69994]: DEBUG nova.network.neutron [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updated VIF entry in instance network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.091740] env[69994]: DEBUG nova.network.neutron [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.119879] env[69994]: DEBUG nova.compute.utils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 636.128679] env[69994]: DEBUG nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 636.128679] env[69994]: DEBUG nova.network.neutron [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 636.190769] env[69994]: DEBUG oslo_vmware.api [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241281, 'name': PowerOnVM_Task, 'duration_secs': 0.691923} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.192345] env[69994]: DEBUG nova.policy [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0af466bdfa94bcea27d2e652ba22ecf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4edf952de324f52a5bc36e5d3d1b23b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.193889] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 636.194181] env[69994]: INFO nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Took 10.44 seconds to spawn the instance on the hypervisor. 
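The records above trace the oslo.vmware task-polling pattern: nova-compute submits a vCenter task (PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ...), then repeatedly polls it, logging "progress is N%" until the task "completed successfully" with a duration_secs. The following is a minimal, self-contained sketch of that poll-until-terminal loop; it is illustrative only, does not reproduce oslo_vmware.api, and the TaskInfo/FakeTask/poll names are hypothetical stand-ins.

    # Illustrative sketch of the poll-until-done pattern visible in the log above.
    # NOT the oslo.vmware implementation; TaskInfo, FakeTask and poll() are hypothetical.
    import logging
    import time
    from dataclasses import dataclass

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("task_poller")


    @dataclass
    class TaskInfo:
        """Stand-in for a vCenter TaskInfo object (the task-32412xx ids above)."""
        task_id: str
        name: str
        progress: int = 0
        state: str = "running"      # "running" | "success" | "error"


    class FakeTask:
        """Fake task that advances 25% per poll, mimicking the progress records."""
        def __init__(self, task_id, name):
            self.info = TaskInfo(task_id, name)

        def poll(self):
            self.info.progress = min(100, self.info.progress + 25)
            if self.info.progress >= 100:
                self.info.state = "success"
            return self.info


    def wait_for_task(task, interval=0.5):
        """Poll until the task reaches a terminal state, logging progress."""
        started = time.monotonic()
        while True:
            info = task.poll()
            if info.state == "success":
                duration = time.monotonic() - started
                LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                          "completed successfully.", info.task_id, info.name, duration)
                return info
            if info.state == "error":
                raise RuntimeError(f"{info.name} ({info.task_id}) failed")
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      info.task_id, info.name, info.progress)
            time.sleep(interval)


    if __name__ == "__main__":
        wait_for_task(FakeTask("task-3241281", "PowerOnVM_Task"))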
[ 636.194436] env[69994]: DEBUG nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 636.195340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b79341-00c9-45be-9655-41df3d5d80c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.522542] env[69994]: DEBUG nova.network.neutron [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Successfully created port: 08fb6905-cef5-48b8-be29-8244adaf4c18 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.538185] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241287, 'name': ReconfigVM_Task, 'duration_secs': 0.277393} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.541430] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 2f710439-0216-401e-9759-af584f9bd00d/2f710439-0216-401e-9759-af584f9bd00d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 636.542337] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068338} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.542557] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0936679-deea-4e2d-b1eb-9b2b41801f02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.561240] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 636.566256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf953c4-8adf-4de7-bfce-051218c2dd1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.580953] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241288, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.590702] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 636.590702] env[69994]: value = "task-3241289" [ 636.590702] env[69994]: _type = "Task" [ 636.590702] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.601261] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 636.603617] env[69994]: DEBUG oslo_concurrency.lockutils [req-6983297a-339f-4423-b137-b4a9920fa286 req-cfd4cfb2-1c11-467a-815f-6d30d99c7076 service nova] Releasing lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.604120] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51b48543-6972-4ee0-80f2-122a02790b78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.630344] env[69994]: DEBUG nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.643375] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241289, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.643881] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 636.643881] env[69994]: value = "task-3241290" [ 636.643881] env[69994]: _type = "Task" [ 636.643881] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.715385] env[69994]: INFO nova.compute.manager [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Took 27.36 seconds to build instance. [ 636.727664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.727664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.758237] env[69994]: DEBUG nova.network.neutron [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updated VIF entry in instance network info cache for port a16b9fc2-06de-47cb-b39f-b77130ed0eec. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.758590] env[69994]: DEBUG nova.network.neutron [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updating instance_info_cache with network_info: [{"id": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "address": "fa:16:3e:45:f4:b2", "network": {"id": "e6b2a322-ac01-400c-a8ec-68a371b2061c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-141203958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8b8ab56b87c46f9b960fc3b430197d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa16b9fc2-06", "ovs_interfaceid": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.794714] env[69994]: INFO nova.compute.manager [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Rebuilding instance [ 636.858557] env[69994]: DEBUG nova.compute.manager [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 636.859247] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9558ae82-8e1a-4f27-a0a5-2a7d619715e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.052319] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241288, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765577} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.053293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b003b7c2-e754-440e-8a65-13c5e9c68cd5/b003b7c2-e754-440e-8a65-13c5e9c68cd5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 637.053510] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 637.054213] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df88647-f7fe-477e-ae2d-4a1f38dbeba6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.056562] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca52fb00-2e4d-4e3b-b449-4f5f4f86f4ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.064176] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a7ea76-5ba2-4811-927e-92807b6d9584 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.066304] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 637.066304] env[69994]: value = "task-3241291" [ 637.066304] env[69994]: _type = "Task" [ 637.066304] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.096662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33019d2-dbd3-47be-b446-5ec31f38ee10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.101900] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241291, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.109390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88261dc-7d8a-44fe-b4f9-ed0df1d84716 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.116080] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241289, 'name': Rename_Task, 'duration_secs': 0.240339} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.116680] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 637.116945] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8193e8db-131e-46ca-82dc-907a6fd096d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.130462] env[69994]: DEBUG nova.network.neutron [-] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.130462] env[69994]: DEBUG nova.compute.provider_tree [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.135055] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 637.135055] env[69994]: value = "task-3241292" [ 637.135055] env[69994]: _type = "Task" [ 637.135055] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.145181] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241292, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.154420] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241290, 'name': ReconfigVM_Task, 'duration_secs': 0.351895} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.154681] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 637.155363] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4d02332-740e-4d82-b76a-b67df82c17d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.162576] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 637.162576] env[69994]: value = "task-3241293" [ 637.162576] env[69994]: _type = "Task" [ 637.162576] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.174592] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241293, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.217778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-41d5c85a-00f1-436e-be66-a0160f25cad0 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.874s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.261701] env[69994]: DEBUG oslo_concurrency.lockutils [req-b3af719d-42d3-4310-810c-b1685e933c3c req-bbf22133-7fd4-4d6c-9bd7-92e102e120b6 service nova] Releasing lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.576630] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059857} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.576980] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 637.577799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb498d5d-4b56-4b15-af26-379454890e43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.604227] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] b003b7c2-e754-440e-8a65-13c5e9c68cd5/b003b7c2-e754-440e-8a65-13c5e9c68cd5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 637.604538] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6318d874-3d73-470a-bddf-8afcab1efd76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.625436] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 637.625436] env[69994]: value = "task-3241294" [ 637.625436] env[69994]: _type = "Task" [ 637.625436] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.633958] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241294, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.637441] env[69994]: DEBUG nova.scheduler.client.report [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.640604] env[69994]: INFO nova.compute.manager [-] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Took 1.78 seconds to deallocate network for instance. 
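The "Inventory has not changed for provider ..." record above prints the resource-provider inventory the resource tracker reports to Placement (VCPU, MEMORY_MB, DISK_GB, each with total, reserved and allocation_ratio). Under the usual Placement convention, the schedulable capacity of a resource class is (total - reserved) * allocation_ratio; the short sketch below recomputes that for the exact numbers logged. It is a worked illustration under that assumption, not Nova or Placement code.

    # Recompute effective (schedulable) capacity from the inventory dict logged above.
    # Assumed formula: capacity = (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def effective_capacity(inv):
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    if __name__ == "__main__":
        for rc, cap in effective_capacity(inventory).items():
            print(f"{rc}: {cap:g}")
        # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400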
[ 637.642014] env[69994]: DEBUG nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.652150] env[69994]: DEBUG oslo_vmware.api [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241292, 'name': PowerOnVM_Task, 'duration_secs': 0.467453} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.652600] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 637.652797] env[69994]: INFO nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Took 9.42 seconds to spawn the instance on the hypervisor. [ 637.652970] env[69994]: DEBUG nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 637.653773] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053cea1f-b7d2-49a1-8c8f-9c186c77b6bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.667706] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.667934] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Flavor limits 0:0:0 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.668146] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.668372] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.668521] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.668668] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.668869] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.669042] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 637.669476] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.669476] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.669653] env[69994]: DEBUG nova.virt.hardware [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.674463] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05752ec9-5379-4e21-83bb-e86870e067b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
637.686320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e9e817-0bbb-40f2-baf2-fa35e9f16b9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.691405] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241293, 'name': Rename_Task, 'duration_secs': 0.123375} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.691662] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 637.692290] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93b59d2b-6eac-4092-9fae-04fa990a2c46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.704341] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 637.704341] env[69994]: value = "task-3241295" [ 637.704341] env[69994]: _type = "Task" [ 637.704341] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.713146] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241295, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.721483] env[69994]: DEBUG nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 637.834925] env[69994]: DEBUG nova.compute.manager [req-12f64daa-959f-4084-807a-6849c23e4196 req-b9480530-253d-4747-b21a-938c3e332140 service nova] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Received event network-vif-deleted-c52664a0-1200-4c1c-9848-50d360e81f40 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 637.877562] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 637.878360] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb7f2365-f26a-4de2-9b28-1a86f1e30246 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.886269] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 637.886269] env[69994]: value = "task-3241296" [ 637.886269] env[69994]: _type = "Task" [ 637.886269] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.895999] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241296, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.135705] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241294, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.136328] env[69994]: DEBUG nova.network.neutron [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Successfully updated port: 08fb6905-cef5-48b8-be29-8244adaf4c18 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.141732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.142696] env[69994]: DEBUG nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 638.145416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.820s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.147677] env[69994]: INFO nova.compute.claims [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.153671] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.188045] env[69994]: INFO nova.compute.manager [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Took 25.74 seconds to build instance. [ 638.215135] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241295, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.245078] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.380604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.380910] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.397448] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241296, 'name': PowerOffVM_Task, 'duration_secs': 0.181607} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.397681] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 638.397871] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 638.398658] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86166dd-1839-47e6-a14b-61934cba7fa6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.407992] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 638.408457] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07dba6b5-dd22-45fa-8da5-7c5e39bec717 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.431092] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 
tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 638.431396] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 638.431577] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleting the datastore file [datastore2] 84efe900-1d79-42f9-b3c6-54299757cdbc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 638.431837] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0619edb7-a05a-4c1e-85ed-8894ca2fd26b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.438896] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 638.438896] env[69994]: value = "task-3241298" [ 638.438896] env[69994]: _type = "Task" [ 638.438896] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.446777] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.636596] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241294, 'name': ReconfigVM_Task, 'duration_secs': 0.610753} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.637082] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Reconfigured VM instance instance-0000000c to attach disk [datastore2] b003b7c2-e754-440e-8a65-13c5e9c68cd5/b003b7c2-e754-440e-8a65-13c5e9c68cd5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 638.637797] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efca408f-5637-412b-bf68-f9a8fccd4aaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.639895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "refresh_cache-f109c803-bf37-4845-8956-4336dbc8a946" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.640118] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquired lock "refresh_cache-f109c803-bf37-4845-8956-4336dbc8a946" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.640345] env[69994]: DEBUG nova.network.neutron [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.647578] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 638.647578] env[69994]: value = "task-3241299" [ 638.647578] env[69994]: _type = "Task" [ 638.647578] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.655476] env[69994]: DEBUG nova.compute.utils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 638.656968] env[69994]: DEBUG nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 638.657235] env[69994]: DEBUG nova.network.neutron [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 638.666029] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241299, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.690304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e0a5bc37-9523-460e-b747-5310b4170af3 tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "2f710439-0216-401e-9759-af584f9bd00d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.106s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.700380] env[69994]: DEBUG nova.policy [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aae36590634048a5a1c9911d5a38a872', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6356297e311c4b47b689a7cda41127f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 638.714459] env[69994]: DEBUG oslo_vmware.api [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241295, 'name': PowerOnVM_Task, 'duration_secs': 0.981388} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.714696] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 638.715021] env[69994]: INFO nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Took 5.55 seconds to spawn the instance on the hypervisor. 
[ 638.715148] env[69994]: DEBUG nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.715834] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b38d10-164c-48ff-ae96-ea69c73c828e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.950745] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127001} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.951668] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 638.951998] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 638.952304] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 638.981983] env[69994]: DEBUG nova.network.neutron [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Successfully created port: bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.130749] env[69994]: DEBUG nova.compute.manager [req-7ad5fe1c-b73a-4a3b-9b2a-93aae4cdf1e7 req-b675dd03-2de9-46ae-9052-40e713e76877 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Received event network-vif-plugged-08fb6905-cef5-48b8-be29-8244adaf4c18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 639.130749] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ad5fe1c-b73a-4a3b-9b2a-93aae4cdf1e7 req-b675dd03-2de9-46ae-9052-40e713e76877 service nova] Acquiring lock "f109c803-bf37-4845-8956-4336dbc8a946-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.130749] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ad5fe1c-b73a-4a3b-9b2a-93aae4cdf1e7 req-b675dd03-2de9-46ae-9052-40e713e76877 service nova] Lock "f109c803-bf37-4845-8956-4336dbc8a946-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.130749] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ad5fe1c-b73a-4a3b-9b2a-93aae4cdf1e7 req-b675dd03-2de9-46ae-9052-40e713e76877 service nova] Lock "f109c803-bf37-4845-8956-4336dbc8a946-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.132012] env[69994]: DEBUG nova.compute.manager [req-7ad5fe1c-b73a-4a3b-9b2a-93aae4cdf1e7 req-b675dd03-2de9-46ae-9052-40e713e76877 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] No waiting events found dispatching network-vif-plugged-08fb6905-cef5-48b8-be29-8244adaf4c18 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 639.132392] env[69994]: WARNING nova.compute.manager [req-7ad5fe1c-b73a-4a3b-9b2a-93aae4cdf1e7 req-b675dd03-2de9-46ae-9052-40e713e76877 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Received unexpected event network-vif-plugged-08fb6905-cef5-48b8-be29-8244adaf4c18 for instance with vm_state building and task_state spawning. [ 639.166314] env[69994]: DEBUG nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 639.170652] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241299, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.198113] env[69994]: DEBUG nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 639.236040] env[69994]: INFO nova.compute.manager [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Took 20.49 seconds to build instance. [ 639.257936] env[69994]: DEBUG nova.network.neutron [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.582016] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28916694-1117-4bfb-baf3-c815994e3fc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.588027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757ace3f-1a72-4847-84c9-f3be465438dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.619787] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316e40d2-850d-4b39-9fce-5b80c1832d05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.629746] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95b7670-daf9-407b-91d6-b63416c0d502 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.644082] env[69994]: DEBUG nova.compute.provider_tree [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.658295] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241299, 'name': Rename_Task, 'duration_secs': 0.720605} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.658771] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 639.659147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9510472-9ff5-406e-860c-1540761c07f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.665093] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 639.665093] env[69994]: value = "task-3241300" [ 639.665093] env[69994]: _type = "Task" [ 639.665093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.676424] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241300, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.719250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.741085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c766e72-0970-4375-ac8b-091fa032ab40 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "21f66039-6292-4d9c-b97d-668d029def24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.009s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.983832] env[69994]: DEBUG nova.network.neutron [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Updating instance_info_cache with network_info: [{"id": "08fb6905-cef5-48b8-be29-8244adaf4c18", "address": "fa:16:3e:0e:bd:ba", "network": {"id": "08e6ba22-8e20-4c46-b472-d83940dc0b6d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-228403378-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4edf952de324f52a5bc36e5d3d1b23b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fb6905-ce", "ovs_interfaceid": "08fb6905-cef5-48b8-be29-8244adaf4c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.002172] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 640.004120] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.004120] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 640.004120] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.004120] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 640.004893] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 640.005197] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 640.005375] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 640.005534] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 640.005701] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 640.005917] env[69994]: DEBUG nova.virt.hardware [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 
tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 640.009442] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c92afc-2121-4317-9d43-3b98b0969ea8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.020263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb8868b-3000-49ae-8fff-48d11b27f768 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.037584] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.044543] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.044666] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.044893] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dc032f1-0ea4-4609-bb41-0ad648f3a360 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.062221] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.062221] env[69994]: value = "task-3241301" [ 640.062221] env[69994]: _type = "Task" [ 640.062221] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.071519] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241301, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.147456] env[69994]: DEBUG nova.scheduler.client.report [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 640.176654] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241300, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.178479] env[69994]: DEBUG nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 640.208207] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:39:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='158583734',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1369705633',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 640.208465] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.208663] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 640.209420] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e 
tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.210572] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 640.210572] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 640.210572] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 640.210572] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 640.210572] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 640.210762] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 640.210816] env[69994]: DEBUG nova.virt.hardware [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 640.212028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50cab39b-2fe9-4e00-b779-c1c28c882ac3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.224540] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da9037f-b988-4e1c-b61a-17cea8954e48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.242890] env[69994]: DEBUG nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 
tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 640.486963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Releasing lock "refresh_cache-f109c803-bf37-4845-8956-4336dbc8a946" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.487815] env[69994]: DEBUG nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Instance network_info: |[{"id": "08fb6905-cef5-48b8-be29-8244adaf4c18", "address": "fa:16:3e:0e:bd:ba", "network": {"id": "08e6ba22-8e20-4c46-b472-d83940dc0b6d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-228403378-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4edf952de324f52a5bc36e5d3d1b23b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fb6905-ce", "ovs_interfaceid": "08fb6905-cef5-48b8-be29-8244adaf4c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 640.487973] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:bd:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08fb6905-cef5-48b8-be29-8244adaf4c18', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.500748] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Creating folder: Project (a4edf952de324f52a5bc36e5d3d1b23b). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.503018] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d2c4abb-82bb-4417-85aa-c08a45e6ac4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.520343] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Created folder: Project (a4edf952de324f52a5bc36e5d3d1b23b) in parent group-v647729. [ 640.520343] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Creating folder: Instances. Parent ref: group-v647768. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.520343] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a6836b1-8d91-4932-ad6d-dc96ea67ed21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.520343] env[69994]: DEBUG nova.network.neutron [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Successfully updated port: bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 640.523415] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Created folder: Instances in parent group-v647768. [ 640.523600] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.523690] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.523864] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db2c3259-067b-4de8-9a34-28b3666c21d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.551325] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.551325] env[69994]: value = "task-3241304" [ 640.551325] env[69994]: _type = "Task" [ 640.551325] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.563266] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241304, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.572361] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241301, 'name': CreateVM_Task, 'duration_secs': 0.31679} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.572543] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 640.572945] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.573125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.573439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 640.573680] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaa622a4-f04c-4a43-b00e-bef5f4b8ed39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.579497] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 640.579497] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526f3201-4b2f-8055-8749-fa45f4b8b53e" [ 640.579497] env[69994]: _type = "Task" [ 640.579497] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.590676] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526f3201-4b2f-8055-8749-fa45f4b8b53e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.610519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.611028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.653534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.654077] env[69994]: DEBUG nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 640.657292] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.186s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.657507] env[69994]: DEBUG nova.objects.instance [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lazy-loading 'resources' on Instance uuid 91666839-f440-499e-acf0-07d352e701ab {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 640.677447] env[69994]: DEBUG oslo_vmware.api [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241300, 'name': PowerOnVM_Task, 'duration_secs': 0.684342} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.677719] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 640.677939] env[69994]: INFO nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Took 9.91 seconds to spawn the instance on the hypervisor. [ 640.678153] env[69994]: DEBUG nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.678923] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830bc2be-2f91-4ca7-83a4-f700c8084eee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.770852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.022272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.022272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.022272] env[69994]: DEBUG nova.network.neutron [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 641.061844] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241304, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.093761] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526f3201-4b2f-8055-8749-fa45f4b8b53e, 'name': SearchDatastore_Task, 'duration_secs': 0.009547} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.094450] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.094450] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.094730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.094940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.095225] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.095874] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cd5229e-eba9-4aaa-92da-443ab3a1a0c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.105357] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.106744] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 641.107880] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f12985c-3e36-4293-84cb-65b9943aaf81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.116055] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 641.116055] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b3f274-7c5c-60f7-9437-e06de8867a9b" [ 641.116055] env[69994]: _type = "Task" [ 641.116055] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.130547] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b3f274-7c5c-60f7-9437-e06de8867a9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.160531] env[69994]: DEBUG nova.compute.utils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 641.169137] env[69994]: DEBUG nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 641.169137] env[69994]: DEBUG nova.network.neutron [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 641.204318] env[69994]: INFO nova.compute.manager [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Took 26.90 seconds to build instance. 
[ 641.227320] env[69994]: DEBUG nova.policy [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42976cbf12d645ee8bbedf58c7d07603', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16b66dfea80140689fa05c54842cdf96', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 641.520328] env[69994]: DEBUG nova.network.neutron [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Successfully created port: 73644aa1-0c58-40cd-8d52-00e4b388d8bf {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.565422] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241304, 'name': CreateVM_Task, 'duration_secs': 0.61625} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.565627] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.566392] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.568253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.568665] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.568965] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6baf13c-5a3a-4777-8a7b-0afacc3a9e6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.579951] env[69994]: DEBUG nova.network.neutron [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.583312] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 641.583312] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e1d3f4-cbf9-84ed-e97d-062e5fbb7274" [ 641.583312] env[69994]: _type = "Task" [ 641.583312] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.594989] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e1d3f4-cbf9-84ed-e97d-062e5fbb7274, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.630708] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b3f274-7c5c-60f7-9437-e06de8867a9b, 'name': SearchDatastore_Task, 'duration_secs': 0.014648} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.638774] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-330e228a-c393-4a44-b514-ec25d89df541 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.650682] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 641.650682] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5200964c-52be-793c-1264-d01cb2db6779" [ 641.650682] env[69994]: _type = "Task" [ 641.650682] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.657913] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5200964c-52be-793c-1264-d01cb2db6779, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.659823] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a20904-3513-421a-8598-3f9faadcf44d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.666977] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718d1fc7-e845-437d-bbb9-39a66a7cc805 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.671455] env[69994]: DEBUG nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 641.710160] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844cfe12-0e16-4532-b568-1e11d8c11ddd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.710377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-84783661-7499-465e-a626-6b81a1ca07a7 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.550s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.718945] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187b0e67-e35a-4594-a11c-b52634eeebaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.727503] env[69994]: DEBUG nova.compute.manager [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Received event network-changed-be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 641.727735] env[69994]: DEBUG nova.compute.manager [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Refreshing instance network info cache due to event network-changed-be9f669d-36ab-4cbd-a56f-5db33a833aa8. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 641.728428] env[69994]: DEBUG oslo_concurrency.lockutils [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] Acquiring lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.728428] env[69994]: DEBUG oslo_concurrency.lockutils [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] Acquired lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.728428] env[69994]: DEBUG nova.network.neutron [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Refreshing network info cache for port be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 641.740427] env[69994]: DEBUG nova.compute.provider_tree [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.830027] env[69994]: DEBUG nova.network.neutron [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Updating instance_info_cache with network_info: [{"id": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "address": "fa:16:3e:32:99:ad", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcae7796-25", "ovs_interfaceid": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.100579] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e1d3f4-cbf9-84ed-e97d-062e5fbb7274, 'name': SearchDatastore_Task, 'duration_secs': 0.028168} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.101850] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.101850] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.101850] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.163864] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5200964c-52be-793c-1264-d01cb2db6779, 'name': SearchDatastore_Task, 'duration_secs': 0.02124} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.163864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.163864] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.164289] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.164289] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.164568] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f71c18c0-1a9e-49df-a703-60ad5837dbd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.166693] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3042a64f-65f4-4651-a78a-0a3f1caa58d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.182076] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.182076] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.182076] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 642.182076] env[69994]: value = "task-3241305" [ 642.182076] env[69994]: _type = "Task" [ 642.182076] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.182076] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06037e94-3691-41ad-8bbb-32ab08a628c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.190060] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 642.190060] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e6c02c-db0f-8458-0844-b2cd05f9d4a0" [ 642.190060] env[69994]: _type = "Task" [ 642.190060] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.196561] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241305, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.207020] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e6c02c-db0f-8458-0844-b2cd05f9d4a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.213314] env[69994]: DEBUG nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.243608] env[69994]: DEBUG nova.scheduler.client.report [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 642.334209] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Releasing lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.334209] env[69994]: DEBUG nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Instance network_info: |[{"id": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "address": "fa:16:3e:32:99:ad", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcae7796-25", "ovs_interfaceid": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 642.334398] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:99:ad', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '274afb4c-04df-4213-8ad2-8f48a10d78a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcae7796-2595-4bff-96c1-d85a7cba05d8', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 642.342865] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Creating folder: Project (6356297e311c4b47b689a7cda41127f6). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 642.343330] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af42547f-6d78-478e-a67f-781f92ca6aa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.361147] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Created folder: Project (6356297e311c4b47b689a7cda41127f6) in parent group-v647729. [ 642.361147] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Creating folder: Instances. Parent ref: group-v647771. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 642.361147] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e686d460-bfce-445c-9d14-da0196ea6a6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.370482] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Created folder: Instances in parent group-v647771. [ 642.370482] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 642.371270] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 642.371506] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb2a34ff-17df-409d-8330-51b57d05eb88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.396907] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 642.396907] env[69994]: value = "task-3241308" [ 642.396907] env[69994]: _type = "Task" [ 642.396907] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.406437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "153f0ead-6e2f-4077-b86a-00d3a1114fed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.406703] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.414825] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241308, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.593339] env[69994]: DEBUG nova.compute.manager [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Received event network-changed-08fb6905-cef5-48b8-be29-8244adaf4c18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 642.593339] env[69994]: DEBUG nova.compute.manager [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Refreshing instance network info cache due to event network-changed-08fb6905-cef5-48b8-be29-8244adaf4c18. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 642.593339] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Acquiring lock "refresh_cache-f109c803-bf37-4845-8956-4336dbc8a946" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.593339] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Acquired lock "refresh_cache-f109c803-bf37-4845-8956-4336dbc8a946" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.593339] env[69994]: DEBUG nova.network.neutron [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Refreshing network info cache for port 08fb6905-cef5-48b8-be29-8244adaf4c18 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.683749] env[69994]: DEBUG nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 642.698178] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241305, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.708246] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e6c02c-db0f-8458-0844-b2cd05f9d4a0, 'name': SearchDatastore_Task, 'duration_secs': 0.012132} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.712289] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-decff058-b7ba-41bb-9f35-131fbcde4ee8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.720249] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 642.720249] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529b3841-54b1-8e4c-bdf4-67f17686c12f" [ 642.720249] env[69994]: _type = "Task" [ 642.720249] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.722467] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 642.722705] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.722991] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.723081] env[69994]: DEBUG nova.virt.hardware 
[None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.723217] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.723358] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 642.723924] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 642.723924] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 642.723924] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 642.724120] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 642.724203] env[69994]: DEBUG nova.virt.hardware [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 642.725368] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee44161c-3c98-4204-bd6b-39a66ab1eac8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.733761] env[69994]: DEBUG nova.network.neutron [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updated VIF entry in instance network info cache for port be9f669d-36ab-4cbd-a56f-5db33a833aa8. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 642.734137] env[69994]: DEBUG nova.network.neutron [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updating instance_info_cache with network_info: [{"id": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "address": "fa:16:3e:e2:6b:09", "network": {"id": "309c6f91-53cb-40c8-a685-71dd7afb097b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-898903642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3405ff9c38dd46ba98df2d9a835ed860", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe9f669d-36", "ovs_interfaceid": "be9f669d-36ab-4cbd-a56f-5db33a833aa8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.747626] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26363900-3863-4e9b-803c-f7eb370e9ef2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.751598] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529b3841-54b1-8e4c-bdf4-67f17686c12f, 'name': SearchDatastore_Task, 'duration_secs': 0.01944} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.752662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.095s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.754690] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.754690] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f109c803-bf37-4845-8956-4336dbc8a946/f109c803-bf37-4845-8956-4336dbc8a946.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.755875] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.882s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.757499] env[69994]: INFO nova.compute.claims [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.760365] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-971e47dc-bf26-494d-bfd5-9612d34b7e41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.773466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.779558] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 642.779558] env[69994]: value = "task-3241309" [ 642.779558] env[69994]: _type = "Task" [ 642.779558] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.785458] env[69994]: INFO nova.scheduler.client.report [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Deleted allocations for instance 91666839-f440-499e-acf0-07d352e701ab [ 642.789889] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.852872] env[69994]: INFO nova.compute.manager [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Rebuilding instance [ 642.897077] env[69994]: DEBUG nova.compute.manager [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 642.898027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d8228d-7cd3-419d-abaf-4d7d9d89056f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.909886] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241308, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.201293] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241305, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531482} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.201609] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.201826] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.202422] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfc4200b-e775-4609-b97f-ed37cf8e787c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.211144] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 643.211144] env[69994]: value = "task-3241310" [ 643.211144] env[69994]: _type = "Task" [ 643.211144] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.222639] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.237687] env[69994]: DEBUG oslo_concurrency.lockutils [req-1d9fb590-e7fa-4ca1-92c2-36648ba80a35 req-7c3b58d3-c629-4101-8121-1dceef5f6d3f service nova] Releasing lock "refresh_cache-53a8714c-50f7-4990-a3d9-86f8fc908d03" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.291347] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241309, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.297085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-500e383a-cbfb-4ee9-b1aa-fddf1a8da572 tempest-ServerDiagnosticsTest-1635752550 tempest-ServerDiagnosticsTest-1635752550-project-member] Lock "91666839-f440-499e-acf0-07d352e701ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.856s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.411380] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241308, 'name': CreateVM_Task, 'duration_secs': 0.785856} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.411608] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 643.413220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.413220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.413220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 643.413450] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d547836c-c61f-4704-b8ad-2b79cd0724d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.426410] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 643.426410] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5278957b-d6c3-783d-87b5-82542a24f335" [ 643.426410] env[69994]: _type = "Task" [ 643.426410] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.427377] env[69994]: DEBUG nova.network.neutron [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Successfully updated port: 73644aa1-0c58-40cd-8d52-00e4b388d8bf {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 643.442526] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5278957b-d6c3-783d-87b5-82542a24f335, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.722130] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087336} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.722538] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.723491] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1680ebab-21d4-435a-805e-9ff0bf0c7305 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.747836] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.748258] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2257b25-dba3-4f41-b2f7-dd60571d9717 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.769604] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 643.769604] env[69994]: value = "task-3241311" [ 643.769604] env[69994]: _type = "Task" [ 643.769604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.781566] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.792117] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.740364} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.792439] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f109c803-bf37-4845-8956-4336dbc8a946/f109c803-bf37-4845-8956-4336dbc8a946.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.792677] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.792953] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d475459-ec60-41b0-8376-d30d6a08781e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.799334] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 643.799334] env[69994]: value = "task-3241312" [ 643.799334] env[69994]: _type = "Task" [ 643.799334] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.804229] env[69994]: DEBUG nova.network.neutron [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Updated VIF entry in instance network info cache for port 08fb6905-cef5-48b8-be29-8244adaf4c18. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 643.804229] env[69994]: DEBUG nova.network.neutron [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Updating instance_info_cache with network_info: [{"id": "08fb6905-cef5-48b8-be29-8244adaf4c18", "address": "fa:16:3e:0e:bd:ba", "network": {"id": "08e6ba22-8e20-4c46-b472-d83940dc0b6d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-228403378-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4edf952de324f52a5bc36e5d3d1b23b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08fb6905-ce", "ovs_interfaceid": "08fb6905-cef5-48b8-be29-8244adaf4c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.809480] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.919245] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 643.919560] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-976a9cda-4ff8-42c7-94f3-2d597114c5d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.929608] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 643.929608] env[69994]: value = "task-3241313" [ 643.929608] env[69994]: _type = "Task" [ 643.929608] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.937573] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "refresh_cache-91bb882c-7b84-450f-bd03-91ea1ce739ce" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.937739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "refresh_cache-91bb882c-7b84-450f-bd03-91ea1ce739ce" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.937897] env[69994]: DEBUG nova.network.neutron [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 643.945347] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241313, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.952463] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5278957b-d6c3-783d-87b5-82542a24f335, 'name': SearchDatastore_Task, 'duration_secs': 0.05602} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.952463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.952463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.952463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.952999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.952999] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 643.952999] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-519c99d1-c6db-4131-b18f-b06b55dbde0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.963336] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 643.963443] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 643.964220] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94972be7-0aeb-4cb9-ab53-d0a3aaca1867 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.971619] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 643.971619] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52501169-3bec-0154-f2d6-76fe361ec00d" [ 643.971619] env[69994]: _type = "Task" [ 643.971619] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.982652] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52501169-3bec-0154-f2d6-76fe361ec00d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.253358] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4bc0b5-5516-4915-9cee-856bc405bd5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.261596] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7ad2cb-d43f-4b33-9b2a-21ea9585d9b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.295353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23baeb03-3b40-4549-b8b9-75771588001e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.302513] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241311, 'name': ReconfigVM_Task, 'duration_secs': 0.444106} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.307069] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 84efe900-1d79-42f9-b3c6-54299757cdbc/84efe900-1d79-42f9-b3c6-54299757cdbc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 644.307896] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Releasing lock "refresh_cache-f109c803-bf37-4845-8956-4336dbc8a946" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.308141] env[69994]: DEBUG nova.compute.manager [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Received event network-vif-plugged-bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 644.308329] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Acquiring lock "7e7953f7-ed5d-4515-9181-93d343ad772d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.308580] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.308757] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.308833] env[69994]: DEBUG nova.compute.manager [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] No waiting events found dispatching network-vif-plugged-bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 644.308983] env[69994]: WARNING nova.compute.manager [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Received unexpected event network-vif-plugged-bcae7796-2595-4bff-96c1-d85a7cba05d8 for instance with vm_state building and task_state spawning. 
[ 644.309191] env[69994]: DEBUG nova.compute.manager [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Received event network-changed-bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 644.309367] env[69994]: DEBUG nova.compute.manager [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Refreshing instance network info cache due to event network-changed-bcae7796-2595-4bff-96c1-d85a7cba05d8. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 644.309606] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Acquiring lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.309915] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Acquired lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.310066] env[69994]: DEBUG nova.network.neutron [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Refreshing network info cache for port bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 644.311155] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6e05729-b9d3-4fca-8da2-03087cc91fe0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.313610] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf2bb95-cdb8-4fa4-9ee5-bc27709e71c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.322788] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18553} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.331471] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.331832] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 644.331832] env[69994]: value = "task-3241314" [ 644.331832] env[69994]: _type = "Task" [ 644.331832] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.332304] env[69994]: DEBUG nova.compute.provider_tree [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.333967] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112256d9-5e21-4b91-8080-01a57db9d354 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.360726] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] f109c803-bf37-4845-8956-4336dbc8a946/f109c803-bf37-4845-8956-4336dbc8a946.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.364476] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-035c58dc-d17e-4bd7-92d1-fd55a869252c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.389616] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 644.389616] env[69994]: value = "task-3241315" [ 644.389616] env[69994]: _type = "Task" [ 644.389616] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.400578] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241315, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.437970] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241313, 'name': PowerOffVM_Task, 'duration_secs': 0.117307} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.438263] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 644.438490] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 644.439270] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e597398-9835-42e2-835c-9f67dd830d45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.446303] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 644.448417] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c8f6603-fa35-4f9a-93c2-f26ec06f5c44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.482619] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52501169-3bec-0154-f2d6-76fe361ec00d, 'name': SearchDatastore_Task, 'duration_secs': 0.021581} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.483444] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-804e59f5-5046-4182-8866-3c84fd08e6b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.486813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 644.487074] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 644.487268] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Deleting the datastore file [datastore2] 21f66039-6292-4d9c-b97d-668d029def24 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 644.487883] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e6a1c81-64a8-4f34-8e5c-bc8e967bbeea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.490705] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 644.490705] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520b39cd-64e6-765b-3246-7dc917b8e264" [ 644.490705] env[69994]: _type = "Task" [ 644.490705] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.495311] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 644.495311] env[69994]: value = "task-3241317" [ 644.495311] env[69994]: _type = "Task" [ 644.495311] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.501119] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520b39cd-64e6-765b-3246-7dc917b8e264, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.505530] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241317, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.506499] env[69994]: DEBUG nova.network.neutron [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.754626] env[69994]: DEBUG nova.network.neutron [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Updating instance_info_cache with network_info: [{"id": "73644aa1-0c58-40cd-8d52-00e4b388d8bf", "address": "fa:16:3e:32:89:12", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73644aa1-0c", "ovs_interfaceid": "73644aa1-0c58-40cd-8d52-00e4b388d8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.797218] env[69994]: DEBUG nova.compute.manager [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Received event network-changed-b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 644.797218] env[69994]: DEBUG nova.compute.manager [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Refreshing instance network info cache due to event network-changed-b3347b62-0c9c-4b6c-8d07-587f2423850c. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 644.797218] env[69994]: DEBUG oslo_concurrency.lockutils [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] Acquiring lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.797218] env[69994]: DEBUG oslo_concurrency.lockutils [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] Acquired lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.797218] env[69994]: DEBUG nova.network.neutron [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Refreshing network info cache for port b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 644.838235] env[69994]: DEBUG nova.scheduler.client.report [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 644.853710] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241314, 'name': Rename_Task, 'duration_secs': 0.153837} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.855021] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 644.855323] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c8469d4-d743-4515-a82a-d61ddb5da6fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.864027] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 644.864027] env[69994]: value = "task-3241318" [ 644.864027] env[69994]: _type = "Task" [ 644.864027] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.876347] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.903412] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241315, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.009078] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520b39cd-64e6-765b-3246-7dc917b8e264, 'name': SearchDatastore_Task, 'duration_secs': 0.035132} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.015636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.016148] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 7e7953f7-ed5d-4515-9181-93d343ad772d/7e7953f7-ed5d-4515-9181-93d343ad772d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.016891] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.361834} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.016891] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78312a2c-4d60-4cac-bcec-7d0481202e79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.020136] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 645.020136] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 645.020136] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 645.027433] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 645.027433] env[69994]: value = "task-3241319" [ 645.027433] env[69994]: _type = "Task" [ 645.027433] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.035566] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241319, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.257548] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "refresh_cache-91bb882c-7b84-450f-bd03-91ea1ce739ce" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.257932] env[69994]: DEBUG nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Instance network_info: |[{"id": "73644aa1-0c58-40cd-8d52-00e4b388d8bf", "address": "fa:16:3e:32:89:12", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73644aa1-0c", "ovs_interfaceid": "73644aa1-0c58-40cd-8d52-00e4b388d8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 645.262032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:89:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7a73c01-1bb9-4612-a1a7-16d71b732e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73644aa1-0c58-40cd-8d52-00e4b388d8bf', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 645.267175] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Creating folder: Project (16b66dfea80140689fa05c54842cdf96). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 645.267532] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05204e2e-2ad7-4427-b73c-7001b1205654 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.278708] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Created folder: Project (16b66dfea80140689fa05c54842cdf96) in parent group-v647729. [ 645.278924] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Creating folder: Instances. Parent ref: group-v647774. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 645.279990] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a4053c8-69de-43e9-b7be-7763fb325135 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.291866] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Created folder: Instances in parent group-v647774. [ 645.291866] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 645.291866] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 645.295471] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a35a87dc-8f56-478a-a8e9-75bd247be648 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.328582] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 645.328582] env[69994]: value = "task-3241322" [ 645.328582] env[69994]: _type = "Task" [ 645.328582] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.340835] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241322, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.349773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.350513] env[69994]: DEBUG nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 645.353778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.487s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.355277] env[69994]: INFO nova.compute.claims [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.384713] env[69994]: DEBUG oslo_vmware.api [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241318, 'name': PowerOnVM_Task, 'duration_secs': 0.516641} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.388035] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 645.388311] env[69994]: DEBUG nova.compute.manager [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 645.389128] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc1a403-9aaf-47c0-92e9-f228ac2cde5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.409624] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241315, 'name': ReconfigVM_Task, 'duration_secs': 0.747062} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.410542] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Reconfigured VM instance instance-0000000e to attach disk [datastore1] f109c803-bf37-4845-8956-4336dbc8a946/f109c803-bf37-4845-8956-4336dbc8a946.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.413877] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57fa7d5e-54cd-4fdc-8df5-7a7158cb1b6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.422263] env[69994]: DEBUG nova.network.neutron [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Updated VIF entry in instance network info cache for port bcae7796-2595-4bff-96c1-d85a7cba05d8. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 645.422263] env[69994]: DEBUG nova.network.neutron [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Updating instance_info_cache with network_info: [{"id": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "address": "fa:16:3e:32:99:ad", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcae7796-25", "ovs_interfaceid": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.429368] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 645.429368] env[69994]: value = "task-3241323" [ 645.429368] env[69994]: _type = "Task" [ 645.429368] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.443642] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241323, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.538253] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241319, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.674593] env[69994]: DEBUG nova.compute.manager [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Received event network-vif-plugged-73644aa1-0c58-40cd-8d52-00e4b388d8bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.675650] env[69994]: DEBUG oslo_concurrency.lockutils [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] Acquiring lock "91bb882c-7b84-450f-bd03-91ea1ce739ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.675650] env[69994]: DEBUG oslo_concurrency.lockutils [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.675811] env[69994]: DEBUG oslo_concurrency.lockutils [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.676242] env[69994]: DEBUG nova.compute.manager [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] No waiting events found dispatching network-vif-plugged-73644aa1-0c58-40cd-8d52-00e4b388d8bf {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 645.676906] env[69994]: WARNING nova.compute.manager [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Received unexpected event network-vif-plugged-73644aa1-0c58-40cd-8d52-00e4b388d8bf for instance with vm_state building and task_state spawning. 
[ 645.676906] env[69994]: DEBUG nova.compute.manager [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Received event network-changed-73644aa1-0c58-40cd-8d52-00e4b388d8bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.676906] env[69994]: DEBUG nova.compute.manager [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Refreshing instance network info cache due to event network-changed-73644aa1-0c58-40cd-8d52-00e4b388d8bf. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 645.676906] env[69994]: DEBUG oslo_concurrency.lockutils [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] Acquiring lock "refresh_cache-91bb882c-7b84-450f-bd03-91ea1ce739ce" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.677342] env[69994]: DEBUG oslo_concurrency.lockutils [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] Acquired lock "refresh_cache-91bb882c-7b84-450f-bd03-91ea1ce739ce" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.677342] env[69994]: DEBUG nova.network.neutron [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Refreshing network info cache for port 73644aa1-0c58-40cd-8d52-00e4b388d8bf {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 645.842609] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241322, 'name': CreateVM_Task, 'duration_secs': 0.443066} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.842609] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 645.843033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.843033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.843344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 645.843500] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47dbed18-e1d9-4d31-a523-744a887ff5ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.848876] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 645.848876] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b988c5-2046-b5ec-0f69-745c528fca62" [ 645.848876] env[69994]: _type = "Task" [ 645.848876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.856883] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b988c5-2046-b5ec-0f69-745c528fca62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.860190] env[69994]: DEBUG nova.compute.utils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 645.864168] env[69994]: DEBUG nova.network.neutron [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updated VIF entry in instance network info cache for port b3347b62-0c9c-4b6c-8d07-587f2423850c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 645.864365] env[69994]: DEBUG nova.network.neutron [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updating instance_info_cache with network_info: [{"id": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "address": "fa:16:3e:2c:18:d7", "network": {"id": "ac661ad7-377f-4b70-9be6-97e7a77207b9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1514924772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f33906db9fd416884267f628a3f05ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3347b62-0c", "ovs_interfaceid": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.865365] env[69994]: DEBUG nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 645.865533] env[69994]: DEBUG nova.network.neutron [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.919484] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.928111] env[69994]: DEBUG oslo_concurrency.lockutils [req-8718b939-6a5d-4ed9-8698-190f540c58e0 req-40b60800-9ace-415b-a495-4513f3b7ff54 service nova] Releasing lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.937446] env[69994]: DEBUG nova.policy [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9418087dfcfc45e78a3062949f15377f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39d3a367749e4a169ce2ad95e4600d49', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 645.943925] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241323, 'name': Rename_Task, 'duration_secs': 0.249194} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.944280] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.944555] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-623631d9-70bb-47a6-80cb-79b1b0a62ddb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.951875] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 645.951875] env[69994]: value = "task-3241324" [ 645.951875] env[69994]: _type = "Task" [ 645.951875] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.960955] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.041697] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53677} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.045283] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 7e7953f7-ed5d-4515-9181-93d343ad772d/7e7953f7-ed5d-4515-9181-93d343ad772d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.045283] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.045283] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ba201b9-76fc-4153-a683-e1aaf6e3a2ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.048316] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 646.048316] env[69994]: value = "task-3241325" [ 646.048316] env[69994]: _type = "Task" [ 646.048316] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.059658] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241325, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.069681] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 646.070169] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.070169] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 646.070363] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.070572] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 646.071027] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 646.071100] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 646.071310] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 646.071528] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee 
tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 646.071789] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 646.072019] env[69994]: DEBUG nova.virt.hardware [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 646.072977] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d137a77-f551-4288-b812-45b721efbc45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.086911] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8f912a-579a-4ebe-8797-3034fdaeb054 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.105354] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 646.110062] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 646.110062] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 646.110341] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1bfd0962-917f-48ab-88b7-e960a99b14fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.127407] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 646.127407] env[69994]: value = "task-3241326" [ 646.127407] env[69994]: _type = "Task" [ 646.127407] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.137294] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241326, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.361769] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b988c5-2046-b5ec-0f69-745c528fca62, 'name': SearchDatastore_Task, 'duration_secs': 0.053007} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.361958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.363371] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 646.363371] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.363371] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.363503] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 646.364694] env[69994]: DEBUG nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 646.368603] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8da4e09e-0a90-4c8e-b44b-4b0195a6af22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.374672] env[69994]: DEBUG oslo_concurrency.lockutils [req-c1df3047-1a88-4fb6-8f4d-1086c98940f1 req-78e6e0f9-00d9-4396-ab54-aa72ed9f4dbe service nova] Releasing lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.382699] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 646.382699] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 646.383363] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbd6a765-6878-4321-af2d-e4ead5858365 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.393135] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 646.393135] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bf2b89-3234-a29e-5ad3-a9e4a35907ab" [ 646.393135] env[69994]: _type = "Task" [ 646.393135] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.401210] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bf2b89-3234-a29e-5ad3-a9e4a35907ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.466629] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241324, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.542379] env[69994]: DEBUG nova.network.neutron [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Successfully created port: 097f8c85-cd23-443b-8f4a-aae58ce5d392 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.561391] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105749} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.561391] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.561940] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9968e392-dde7-4fd9-99aa-fc2bb54038b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.591332] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 7e7953f7-ed5d-4515-9181-93d343ad772d/7e7953f7-ed5d-4515-9181-93d343ad772d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.596665] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-473a0cc8-cf22-40c1-8676-de076ad0af2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.619839] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 646.619839] env[69994]: value = "task-3241327" [ 646.619839] env[69994]: _type = "Task" [ 646.619839] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.629314] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241327, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.642317] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241326, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.682675] env[69994]: DEBUG nova.network.neutron [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Updated VIF entry in instance network info cache for port 73644aa1-0c58-40cd-8d52-00e4b388d8bf. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 646.682737] env[69994]: DEBUG nova.network.neutron [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Updating instance_info_cache with network_info: [{"id": "73644aa1-0c58-40cd-8d52-00e4b388d8bf", "address": "fa:16:3e:32:89:12", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73644aa1-0c", "ovs_interfaceid": "73644aa1-0c58-40cd-8d52-00e4b388d8bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.910387] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bf2b89-3234-a29e-5ad3-a9e4a35907ab, 'name': SearchDatastore_Task, 'duration_secs': 0.016444} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.911435] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-205b99fd-d06f-4e67-ac37-135ca75ed6be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.918724] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 646.918724] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52115ba2-0483-d7a7-6b6f-3ccee5565353" [ 646.918724] env[69994]: _type = "Task" [ 646.918724] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.929896] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52115ba2-0483-d7a7-6b6f-3ccee5565353, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.962960] env[69994]: DEBUG oslo_vmware.api [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241324, 'name': PowerOnVM_Task, 'duration_secs': 0.967252} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.963340] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 646.963547] env[69994]: INFO nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Took 9.32 seconds to spawn the instance on the hypervisor. [ 646.963723] env[69994]: DEBUG nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.965033] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add843e1-e524-46bb-b417-c46831add778 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.970017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894756cb-8518-4179-b6cf-8ed72b1adb76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.986819] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5eae1a-3e7d-4b91-a13f-1b79c8e67fa8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.017479] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222f7860-9c30-40d9-9d61-01ba44455234 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.025749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4257c6b1-8927-4c43-90a0-bd854cde45d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.046252] env[69994]: DEBUG nova.compute.provider_tree [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 
tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.131574] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241327, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.142568] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241326, 'name': CreateVM_Task, 'duration_secs': 0.598183} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.142568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 647.143582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.143582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.144178] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 647.144250] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e18a5302-7f3a-4b35-9c3c-ee439fae6a05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.150159] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 647.150159] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528fbe2e-a018-d935-0e25-375f5074c13f" [ 647.150159] env[69994]: _type = "Task" [ 647.150159] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.159097] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528fbe2e-a018-d935-0e25-375f5074c13f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.186227] env[69994]: DEBUG oslo_concurrency.lockutils [req-12bfd094-35df-467a-b83e-df34afc0a667 req-f47b511c-3650-498a-b40e-e9fd62d605e6 service nova] Releasing lock "refresh_cache-91bb882c-7b84-450f-bd03-91ea1ce739ce" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.388468] env[69994]: DEBUG nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 647.429982] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 647.430389] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.430836] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 647.431148] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.431415] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 647.431837] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 647.432169] env[69994]: DEBUG nova.virt.hardware [None 
req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 647.432420] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 647.432861] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 647.433275] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 647.433422] env[69994]: DEBUG nova.virt.hardware [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 647.434969] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86319b9-5001-4d3c-aff5-e555711260e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.446425] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52115ba2-0483-d7a7-6b6f-3ccee5565353, 'name': SearchDatastore_Task, 'duration_secs': 0.009132} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.450420] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.450736] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 91bb882c-7b84-450f-bd03-91ea1ce739ce/91bb882c-7b84-450f-bd03-91ea1ce739ce.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 647.451147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-886b0a10-912a-4638-858c-1c6378fae454 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.456574] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd735257-884a-4d7b-ad7a-f18f620e7786 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.478720] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 647.478720] env[69994]: value = "task-3241328" [ 647.478720] env[69994]: _type = "Task" [ 647.478720] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.494878] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241328, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.495385] env[69994]: INFO nova.compute.manager [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Took 22.44 seconds to build instance. 
[ 647.551388] env[69994]: DEBUG nova.scheduler.client.report [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 647.628596] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241327, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.661386] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528fbe2e-a018-d935-0e25-375f5074c13f, 'name': SearchDatastore_Task, 'duration_secs': 0.010182} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.661698] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.661930] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 647.662252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.662401] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.662577] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 647.662863] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d26dc048-e8d2-4528-b383-a08b59ca0a0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.673387] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 647.673574] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 647.674313] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e76437ac-3828-44b2-934d-9322cdd8c22c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.679721] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 647.679721] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523f23b8-2a6b-7ffb-328c-b24d8b897007" [ 647.679721] env[69994]: _type = "Task" [ 647.679721] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.689675] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523f23b8-2a6b-7ffb-328c-b24d8b897007, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.993912] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241328, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.998247] env[69994]: DEBUG oslo_concurrency.lockutils [None req-df2c0409-7898-4825-89ba-dde4daa137a4 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "f109c803-bf37-4845-8956-4336dbc8a946" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.406s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.056472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.703s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.057059] env[69994]: DEBUG nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 648.061337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 15.037s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.065255] env[69994]: DEBUG nova.compute.manager [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Received event network-changed-a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.065255] env[69994]: DEBUG nova.compute.manager [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Refreshing instance network info cache due to event network-changed-a16b9fc2-06de-47cb-b39f-b77130ed0eec. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 648.065255] env[69994]: DEBUG oslo_concurrency.lockutils [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] Acquiring lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.065255] env[69994]: DEBUG oslo_concurrency.lockutils [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] Acquired lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.065255] env[69994]: DEBUG nova.network.neutron [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Refreshing network info cache for port a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 648.131877] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241327, 'name': ReconfigVM_Task, 'duration_secs': 1.483934} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.132217] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 7e7953f7-ed5d-4515-9181-93d343ad772d/7e7953f7-ed5d-4515-9181-93d343ad772d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.132824] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ca173a4-0964-47cf-bd70-fc34e9b94091 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.142294] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 648.142294] env[69994]: value = "task-3241329" [ 648.142294] env[69994]: _type = "Task" [ 648.142294] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.156441] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241329, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.193515] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523f23b8-2a6b-7ffb-328c-b24d8b897007, 'name': SearchDatastore_Task, 'duration_secs': 0.031859} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.194456] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe4fba21-dc04-4d76-a642-8c42c3a214a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.203353] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 648.203353] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527f9087-5be8-1b47-eb97-5cf2ab02603c" [ 648.203353] env[69994]: _type = "Task" [ 648.203353] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.214479] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527f9087-5be8-1b47-eb97-5cf2ab02603c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.498127] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241328, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.505629] env[69994]: DEBUG nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 648.561127] env[69994]: DEBUG nova.network.neutron [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Successfully updated port: 097f8c85-cd23-443b-8f4a-aae58ce5d392 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.569419] env[69994]: DEBUG nova.compute.utils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 648.577126] env[69994]: INFO nova.compute.claims [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.585105] env[69994]: DEBUG nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 648.585354] env[69994]: DEBUG nova.network.neutron [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 648.657626] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241329, 'name': Rename_Task, 'duration_secs': 0.438293} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.657783] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 648.658089] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ba6d69f-1b63-4384-912f-b5208e12cf3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.663397] env[69994]: DEBUG nova.policy [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '332c174655374c799bb181a29701473e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb027b5b61c43cdbac3c89eb1e0f2a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 648.669372] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 648.669372] env[69994]: value = "task-3241330" [ 648.669372] env[69994]: _type = "Task" [ 648.669372] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.678829] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "493c2d85-eef5-44ae-acfc-2744685135ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.679206] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "493c2d85-eef5-44ae-acfc-2744685135ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.683482] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241330, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.705707] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "e8b4640f-302d-43cd-a654-c42f9cb34766" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.705810] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.719022] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527f9087-5be8-1b47-eb97-5cf2ab02603c, 'name': SearchDatastore_Task, 'duration_secs': 0.018749} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.719022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.719022] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 648.719022] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3edfd0a2-78a4-4aac-8414-afd42384eec1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.723856] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 648.723856] env[69994]: value = "task-3241331" [ 648.723856] env[69994]: _type = "Task" [ 648.723856] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.734697] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241331, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.762381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "84efe900-1d79-42f9-b3c6-54299757cdbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.762381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "84efe900-1d79-42f9-b3c6-54299757cdbc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.762381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "84efe900-1d79-42f9-b3c6-54299757cdbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.762606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "84efe900-1d79-42f9-b3c6-54299757cdbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.763854] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "84efe900-1d79-42f9-b3c6-54299757cdbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.766758] env[69994]: INFO nova.compute.manager [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Terminating instance [ 648.993091] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241328, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.058986} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.993091] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 91bb882c-7b84-450f-bd03-91ea1ce739ce/91bb882c-7b84-450f-bd03-91ea1ce739ce.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 648.993091] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 648.993091] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-287416d2-da3b-4408-8353-f793bc120b38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.999021] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 648.999021] env[69994]: value = "task-3241332" [ 648.999021] env[69994]: _type = "Task" [ 648.999021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.005784] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241332, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.037388] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.067319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.067319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquired lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.067319] env[69994]: DEBUG nova.network.neutron [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.083673] env[69994]: DEBUG nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 649.092218] env[69994]: INFO nova.compute.resource_tracker [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating resource usage from migration d1d290b1-0dbc-4360-9317-4b05f33c89a3 [ 649.167514] env[69994]: DEBUG nova.network.neutron [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Successfully created port: 641f1973-439b-47b8-a402-9d7a8557e0c2 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.180917] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241330, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.234192] env[69994]: DEBUG nova.network.neutron [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updated VIF entry in instance network info cache for port a16b9fc2-06de-47cb-b39f-b77130ed0eec. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 649.234585] env[69994]: DEBUG nova.network.neutron [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updating instance_info_cache with network_info: [{"id": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "address": "fa:16:3e:45:f4:b2", "network": {"id": "e6b2a322-ac01-400c-a8ec-68a371b2061c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-141203958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8b8ab56b87c46f9b960fc3b430197d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa16b9fc2-06", "ovs_interfaceid": "a16b9fc2-06de-47cb-b39f-b77130ed0eec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.240631] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241331, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.273567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "refresh_cache-84efe900-1d79-42f9-b3c6-54299757cdbc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.273567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock "refresh_cache-84efe900-1d79-42f9-b3c6-54299757cdbc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.273567] env[69994]: DEBUG nova.network.neutron [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.512831] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241332, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070063} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.513620] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 649.515056] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ea419c-5ae7-451d-9f58-d8f1f9e35a04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.539443] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 91bb882c-7b84-450f-bd03-91ea1ce739ce/91bb882c-7b84-450f-bd03-91ea1ce739ce.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 649.543325] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2870d2d-8b1f-487f-9809-102596a3f027 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.567516] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 649.567516] env[69994]: value = "task-3241333" [ 649.567516] env[69994]: _type = "Task" [ 649.567516] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.587624] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241333, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.628178] env[69994]: DEBUG nova.network.neutron [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.684830] env[69994]: DEBUG oslo_vmware.api [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241330, 'name': PowerOnVM_Task, 'duration_secs': 0.916678} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.685353] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 649.685611] env[69994]: INFO nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Took 9.51 seconds to spawn the instance on the hypervisor. [ 649.685830] env[69994]: DEBUG nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 649.686675] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981bc1ff-78ad-4edb-b563-7eea51811e21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.744677] env[69994]: DEBUG oslo_concurrency.lockutils [req-9524dc3d-85f6-4b86-a3d4-6cff52b48d5b req-bda56c85-6f5a-41f0-87b5-9f1827118246 service nova] Releasing lock "refresh_cache-b003b7c2-e754-440e-8a65-13c5e9c68cd5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.745379] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.846713} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.746033] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 649.746475] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 649.746675] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66a6acf6-616b-4901-91f8-f88dec9c9856 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.753697] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 649.753697] env[69994]: value = "task-3241334" [ 649.753697] env[69994]: _type = "Task" [ 649.753697] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.761289] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abcdbbb-b296-44ab-8653-6b1229188dc8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.769756] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241334, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.772789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccb5187-4d8f-4eab-9abc-cee908605e46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.807903] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115f3477-1e43-49e4-96d1-d2d240951cd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.816280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0912b6c6-b549-4e7a-a082-ca241dd02bb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.832315] env[69994]: DEBUG nova.compute.provider_tree [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.847298] env[69994]: DEBUG nova.network.neutron [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.884956] env[69994]: DEBUG nova.compute.manager [req-46691352-c7d8-4e15-a97a-b4b534fcbb30 req-e47a34bc-d899-4428-8003-f4126467e36d service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Received event network-vif-plugged-097f8c85-cd23-443b-8f4a-aae58ce5d392 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.885209] env[69994]: DEBUG oslo_concurrency.lockutils [req-46691352-c7d8-4e15-a97a-b4b534fcbb30 req-e47a34bc-d899-4428-8003-f4126467e36d service nova] Acquiring lock "5badecfd-5784-4968-8519-419a01c67465-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.885416] env[69994]: DEBUG oslo_concurrency.lockutils [req-46691352-c7d8-4e15-a97a-b4b534fcbb30 req-e47a34bc-d899-4428-8003-f4126467e36d service nova] Lock "5badecfd-5784-4968-8519-419a01c67465-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.885578] env[69994]: DEBUG oslo_concurrency.lockutils [req-46691352-c7d8-4e15-a97a-b4b534fcbb30 req-e47a34bc-d899-4428-8003-f4126467e36d service nova] Lock "5badecfd-5784-4968-8519-419a01c67465-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.885740] env[69994]: DEBUG nova.compute.manager [req-46691352-c7d8-4e15-a97a-b4b534fcbb30 req-e47a34bc-d899-4428-8003-f4126467e36d service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] No waiting events found dispatching network-vif-plugged-097f8c85-cd23-443b-8f4a-aae58ce5d392 {{(pid=69994) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 649.885895] env[69994]: WARNING nova.compute.manager [req-46691352-c7d8-4e15-a97a-b4b534fcbb30 req-e47a34bc-d899-4428-8003-f4126467e36d service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Received unexpected event network-vif-plugged-097f8c85-cd23-443b-8f4a-aae58ce5d392 for instance with vm_state building and task_state spawning. [ 650.045934] env[69994]: DEBUG nova.network.neutron [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.083401] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241333, 'name': ReconfigVM_Task, 'duration_secs': 0.35731} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.083961] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 91bb882c-7b84-450f-bd03-91ea1ce739ce/91bb882c-7b84-450f-bd03-91ea1ce739ce.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 650.084414] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eaf44cdf-93d7-4475-9a33-11f88b1fda60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.090395] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 650.090395] env[69994]: value = "task-3241335" [ 650.090395] env[69994]: _type = "Task" [ 650.090395] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.094398] env[69994]: DEBUG nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 650.104975] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241335, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.106030] env[69994]: DEBUG nova.network.neutron [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Updating instance_info_cache with network_info: [{"id": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "address": "fa:16:3e:c9:7d:47", "network": {"id": "2fcc4ee1-e8f5-4288-9c43-e97a20e8e0e7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-471387069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39d3a367749e4a169ce2ad95e4600d49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap097f8c85-cd", "ovs_interfaceid": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.137466] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 650.138105] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.138377] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 650.138603] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Flavor pref 0:0:0 
{{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.138961] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 650.139140] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 650.139559] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 650.139745] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 650.139922] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 650.140135] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 650.140346] env[69994]: DEBUG nova.virt.hardware [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 650.141595] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb2271e-0e6c-4d72-a654-731cc8edd392 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.151097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2180de79-c357-4153-b4c6-b048b22ab0e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.207936] env[69994]: INFO nova.compute.manager [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Took 24.02 seconds to build instance. 
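Annotation (not part of the captured log): the nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor: flavor and image limits default to 65536 per dimension, the single vCPU is factored into sockets/cores/threads combinations, and exactly one topology (1 socket, 1 core, 1 thread) survives. A simplified, self-contained sketch of that enumeration step follows; it is not Nova's actual implementation in nova/virt/hardware.py, only an illustration of the factoring idea.

    # Sketch only: enumerate sockets*cores*threads factorisations of a vCPU count
    # subject to per-dimension maxima, as described by the log lines above.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Yield every topology whose sockets * cores * threads equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield VirtCPUTopology(sockets, cores, threads)

    # With 1 vCPU and the default 65536 limits this yields exactly one
    # topology, matching "Got 1 possible topologies" in the log:
    print(list(possible_topologies(1, 65536, 65536, 65536)))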
[ 650.264472] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241334, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.354523} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.265041] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 650.265617] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfd15f9-f247-4e14-8bb6-24eee13eacc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.286433] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 650.286756] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49d8d4f8-5e86-4c8c-8926-17805864e596 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.310312] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 650.310312] env[69994]: value = "task-3241336" [ 650.310312] env[69994]: _type = "Task" [ 650.310312] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.318599] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241336, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.336287] env[69994]: DEBUG nova.scheduler.client.report [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.550306] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "refresh_cache-84efe900-1d79-42f9-b3c6-54299757cdbc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.550803] env[69994]: DEBUG nova.compute.manager [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 650.551056] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.552240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5536fa14-e947-4aa5-9847-6e24ed637e21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.559752] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.559998] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7106f910-2ee5-4a13-a3dc-ab6986f0684e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.566596] env[69994]: DEBUG oslo_vmware.api [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 650.566596] env[69994]: value = "task-3241337" [ 650.566596] env[69994]: _type = "Task" [ 650.566596] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.574583] env[69994]: DEBUG oslo_vmware.api [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241337, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.601495] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241335, 'name': Rename_Task, 'duration_secs': 0.142796} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.601925] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 650.602395] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e7bc0f9-180a-4242-91a3-4077632b9ba1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.609267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Releasing lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.609655] env[69994]: DEBUG nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Instance network_info: |[{"id": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "address": "fa:16:3e:c9:7d:47", "network": {"id": "2fcc4ee1-e8f5-4288-9c43-e97a20e8e0e7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-471387069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39d3a367749e4a169ce2ad95e4600d49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap097f8c85-cd", "ovs_interfaceid": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 650.611280] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:7d:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b107fab-ee71-47db-ad4d-3c6f05546843', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '097f8c85-cd23-443b-8f4a-aae58ce5d392', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.619011] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Creating folder: Project (39d3a367749e4a169ce2ad95e4600d49). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.619394] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 650.619394] env[69994]: value = "task-3241338" [ 650.619394] env[69994]: _type = "Task" [ 650.619394] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.619563] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57fdc0b2-ca5b-4387-b09d-0acb6c66bfc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.630781] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241338, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.634310] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Created folder: Project (39d3a367749e4a169ce2ad95e4600d49) in parent group-v647729. [ 650.634568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Creating folder: Instances. Parent ref: group-v647778. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.634722] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22804b41-a6d1-462d-9406-02cd473049e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.645407] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Created folder: Instances in parent group-v647778. [ 650.645683] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.645880] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5badecfd-5784-4968-8519-419a01c67465] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 650.646109] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-969955a1-8a2b-47f7-a7ce-5941b72b93df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.667251] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.667251] env[69994]: value = "task-3241341" [ 650.667251] env[69994]: _type = "Task" [ 650.667251] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.676440] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241341, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.710587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-02055a3b-da47-4f48-a81a-582ac3d4431e tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.112s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.825489] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241336, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.842376] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.781s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.843976] env[69994]: INFO nova.compute.manager [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Migrating [ 650.844307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.844583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "compute-rpcapi-router" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.846050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.600s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.850592] env[69994]: INFO nova.compute.claims [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.852174] env[69994]: INFO nova.compute.rpcapi [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 650.852652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "compute-rpcapi-router" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.019276] env[69994]: DEBUG nova.network.neutron [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Successfully updated port: 641f1973-439b-47b8-a402-9d7a8557e0c2 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.079684] env[69994]: DEBUG oslo_vmware.api [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241337, 'name': PowerOffVM_Task, 'duration_secs': 0.227203} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.079977] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.080166] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.080514] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9de5cf3e-4947-4638-89f1-711b66507cc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.112611] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.112773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.112973] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleting the datastore file [datastore1] 84efe900-1d79-42f9-b3c6-54299757cdbc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.113285] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf699157-25af-4bd6-9eb6-70aa7aceaf43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.120919] env[69994]: DEBUG oslo_vmware.api [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 651.120919] env[69994]: value = "task-3241343" [ 651.120919] env[69994]: _type = "Task" [ 651.120919] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.134691] env[69994]: DEBUG oslo_vmware.api [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241343, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.137964] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241338, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.179922] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241341, 'name': CreateVM_Task, 'duration_secs': 0.427215} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.180194] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5badecfd-5784-4968-8519-419a01c67465] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 651.181887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.182023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.182593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 651.182866] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac334281-1853-4926-a9a2-8ccbf478f583 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.188032] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 651.188032] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bf7156-1917-add4-5608-e3bee39a4d63" [ 651.188032] env[69994]: _type = "Task" [ 651.188032] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.198854] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bf7156-1917-add4-5608-e3bee39a4d63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.214158] env[69994]: DEBUG nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 651.325413] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241336, 'name': ReconfigVM_Task, 'duration_secs': 0.685798} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.326094] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 651.327628] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1957ff58-584f-4863-8294-ecd3c84e9464 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.335723] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 651.335723] env[69994]: value = "task-3241344" [ 651.335723] env[69994]: _type = "Task" [ 651.335723] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.348134] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241344, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.374223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.374389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.374569] env[69994]: DEBUG nova.network.neutron [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.527719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.527719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.527719] env[69994]: DEBUG nova.network.neutron [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.633756] env[69994]: DEBUG oslo_vmware.api [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140915} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.637555] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.637774] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.637995] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.638325] env[69994]: INFO nova.compute.manager [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Took 1.09 seconds to destroy the instance on the hypervisor. [ 651.638566] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.638842] env[69994]: DEBUG oslo_vmware.api [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241338, 'name': PowerOnVM_Task, 'duration_secs': 0.596027} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.639863] env[69994]: DEBUG nova.compute.manager [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 651.639863] env[69994]: DEBUG nova.network.neutron [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.641826] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.641826] env[69994]: INFO nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Took 8.96 seconds to spawn the instance on the hypervisor. 
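Annotation: the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" and "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" DEBUG records above are emitted by oslo.service's looping-call machinery, which keeps invoking a wrapped function until it signals completion. The sketch below shows that pattern with the public FixedIntervalLoopingCall API; it is a simplified illustration, not the exact wrapper Nova applies, and task_done()/RESULT/the 0.5s interval are placeholder names and values.

    # Minimal sketch of the oslo.service looping-call pattern behind the
    # "Waiting for function ... to return" DEBUG lines above.
    # task_done(), RESULT and the interval are illustrative placeholders.
    from oslo_service import loopingcall

    RESULT = object()          # stand-in for whatever the wrapped call produces

    def task_done():
        return True            # placeholder completion check

    def _poll():
        # Called every `interval` seconds until it raises LoopingCallDone.
        if task_done():
            raise loopingcall.LoopingCallDone(retvalue=RESULT)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    # start() returns an event; wait() blocks the caller (the "Waiting for
    # function ... to return" step) and yields the retvalue passed above.
    result = timer.start(interval=0.5).wait()
    assert result is RESULT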
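Annotation: the "Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: ..." records in this stream carry the resource-provider inventory the compute node reports to Placement. Placement treats (total - reserved) * allocation_ratio as schedulable capacity for each resource class, with max_unit capping any single allocation. The snippet below only replays that arithmetic on the values copied from the log; it is an illustrative reading, not part of the Nova code path.

    # Capacity implied by the provider inventory logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                      'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                      'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 120,
                      'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable={capacity:.0f}, per-allocation cap={inv['max_unit']}")
    # VCPU: schedulable=192, per-allocation cap=16
    # MEMORY_MB: schedulable=196078, per-allocation cap=65530
    # DISK_GB: schedulable=400, per-allocation cap=120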
[ 651.641826] env[69994]: DEBUG nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 651.643870] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a94f1f-3158-4691-bb64-aa630ec5f893 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.667613] env[69994]: DEBUG nova.network.neutron [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.699037] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bf7156-1917-add4-5608-e3bee39a4d63, 'name': SearchDatastore_Task, 'duration_secs': 0.012659} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.699764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.701217] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.701217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.701217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.701217] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.701668] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2477b57a-8f61-4171-9afa-65ab5f3a5044 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.712772] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.712961] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 651.713721] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3957581a-bb84-4d8c-a9f8-fb3f49f5e3cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.721275] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 651.721275] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5285e973-2895-cb80-f258-18fcdb365dcb" [ 651.721275] env[69994]: _type = "Task" [ 651.721275] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.734263] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5285e973-2895-cb80-f258-18fcdb365dcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.749946] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.847630] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241344, 'name': Rename_Task, 'duration_secs': 0.265503} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.847952] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 651.848402] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b56218d-225f-4592-a40d-6aa1c1cc8799 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.856210] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 651.856210] env[69994]: value = "task-3241345" [ 651.856210] env[69994]: _type = "Task" [ 651.856210] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.869420] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241345, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.137906] env[69994]: DEBUG nova.network.neutron [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.169422] env[69994]: INFO nova.compute.manager [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Took 25.87 seconds to build instance. [ 652.170612] env[69994]: DEBUG nova.network.neutron [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.253168] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5285e973-2895-cb80-f258-18fcdb365dcb, 'name': SearchDatastore_Task, 'duration_secs': 0.022272} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.254073] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28e46ff1-9f63-4c3b-981c-56eeae861fe1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.263465] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 652.263465] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52376ebb-69b0-a354-9b23-bb058b5ad8ba" [ 652.263465] env[69994]: _type = "Task" [ 652.263465] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.285674] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52376ebb-69b0-a354-9b23-bb058b5ad8ba, 'name': SearchDatastore_Task, 'duration_secs': 0.011397} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.285951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.286219] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 5badecfd-5784-4968-8519-419a01c67465/5badecfd-5784-4968-8519-419a01c67465.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 652.286472] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9339917-49c2-4ebe-8ffe-46548343b97d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.294520] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 652.294520] env[69994]: value = "task-3241346" [ 652.294520] env[69994]: _type = "Task" [ 652.294520] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.305730] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241346, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.368653] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241345, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.426274] env[69994]: DEBUG nova.network.neutron [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Updating instance_info_cache with network_info: [{"id": "641f1973-439b-47b8-a402-9d7a8557e0c2", "address": "fa:16:3e:11:89:23", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap641f1973-43", "ovs_interfaceid": "641f1973-439b-47b8-a402-9d7a8557e0c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.436896] env[69994]: DEBUG nova.network.neutron [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance_info_cache with network_info: [{"id": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "address": "fa:16:3e:23:d3:c2", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c4ae184-b8", "ovs_interfaceid": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.531048] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d865db3-ae6e-4c8f-854c-9f2001d47432 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.540074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cb2c15-4878-4618-a4b3-6aaada51171d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.571571] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4121b223-06ad-4003-8030-1d162eb3cfa6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.581281] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a581ea-7a76-4734-b942-0385c212e393 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.604178] env[69994]: DEBUG nova.compute.provider_tree [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.675645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f00a7ef3-425a-48e6-b0e3-2b0eaa1966ba tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.916s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.678614] env[69994]: INFO nova.compute.manager [-] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Took 1.04 seconds to deallocate network for instance. [ 652.806291] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241346, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.867974] env[69994]: DEBUG oslo_vmware.api [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241345, 'name': PowerOnVM_Task, 'duration_secs': 0.668058} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.868722] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 652.868722] env[69994]: DEBUG nova.compute.manager [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 652.869439] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f915646-8e4b-4851-8bdd-21d2541b0874 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.899438] env[69994]: DEBUG nova.compute.manager [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Received event network-changed-b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 652.899725] env[69994]: DEBUG nova.compute.manager [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Refreshing instance network info cache due to event network-changed-b3347b62-0c9c-4b6c-8d07-587f2423850c. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 652.901771] env[69994]: DEBUG oslo_concurrency.lockutils [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] Acquiring lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.901771] env[69994]: DEBUG oslo_concurrency.lockutils [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] Acquired lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.901771] env[69994]: DEBUG nova.network.neutron [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Refreshing network info cache for port b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.926595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.926911] env[69994]: DEBUG nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Instance network_info: |[{"id": "641f1973-439b-47b8-a402-9d7a8557e0c2", "address": "fa:16:3e:11:89:23", "network": 
{"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap641f1973-43", "ovs_interfaceid": "641f1973-439b-47b8-a402-9d7a8557e0c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 652.927687] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:89:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '641f1973-439b-47b8-a402-9d7a8557e0c2', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.941596] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Creating folder: Project (8fb027b5b61c43cdbac3c89eb1e0f2a1). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.943954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.945290] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f67a3f90-fe46-4b81-90b1-5178dd47e130 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.956765] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Created folder: Project (8fb027b5b61c43cdbac3c89eb1e0f2a1) in parent group-v647729. [ 652.956980] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Creating folder: Instances. Parent ref: group-v647781. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.957323] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5e759cf-65f0-48ab-be47-6a223f20ab2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.967494] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Created folder: Instances in parent group-v647781. [ 652.967494] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.968245] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.968411] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56bab548-ab49-4f6b-ac0b-66fb0dbaca44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.996042] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.996042] env[69994]: value = "task-3241349" [ 652.996042] env[69994]: _type = "Task" [ 652.996042] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.005195] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241349, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.053517] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.054096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.107153] env[69994]: DEBUG nova.scheduler.client.report [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.179660] env[69994]: DEBUG nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.190130] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.191969] env[69994]: DEBUG nova.compute.manager [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Received event network-changed-097f8c85-cd23-443b-8f4a-aae58ce5d392 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.191969] env[69994]: DEBUG nova.compute.manager [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Refreshing instance network info cache due to event network-changed-097f8c85-cd23-443b-8f4a-aae58ce5d392. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 653.191969] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Acquiring lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.192201] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Acquired lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.192261] env[69994]: DEBUG nova.network.neutron [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Refreshing network info cache for port 097f8c85-cd23-443b-8f4a-aae58ce5d392 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 653.308311] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241346, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594403} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.308601] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 5badecfd-5784-4968-8519-419a01c67465/5badecfd-5784-4968-8519-419a01c67465.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 653.308810] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.309083] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be674fe9-91b8-4b56-9e4b-410994929bd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.316929] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 653.316929] env[69994]: value = "task-3241350" [ 653.316929] env[69994]: _type = "Task" [ 653.316929] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.326563] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241350, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.396911] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.492737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "f109c803-bf37-4845-8956-4336dbc8a946" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.492991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "f109c803-bf37-4845-8956-4336dbc8a946" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.496263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "f109c803-bf37-4845-8956-4336dbc8a946-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.496507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "f109c803-bf37-4845-8956-4336dbc8a946-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.496689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "f109c803-bf37-4845-8956-4336dbc8a946-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.504024] env[69994]: INFO nova.compute.manager [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Terminating instance [ 653.511764] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241349, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.613744] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.768s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.614368] env[69994]: DEBUG nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 653.620355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.739s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.620355] env[69994]: DEBUG nova.objects.instance [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lazy-loading 'resources' on Instance uuid f3945280-ee10-426b-bcab-3e52e8779c55 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 653.726325] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.802867] env[69994]: DEBUG nova.network.neutron [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updated VIF entry in instance network info cache for port b3347b62-0c9c-4b6c-8d07-587f2423850c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 653.803767] env[69994]: DEBUG nova.network.neutron [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updating instance_info_cache with network_info: [{"id": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "address": "fa:16:3e:2c:18:d7", "network": {"id": "ac661ad7-377f-4b70-9be6-97e7a77207b9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1514924772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f33906db9fd416884267f628a3f05ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3347b62-0c", "ovs_interfaceid": "b3347b62-0c9c-4b6c-8d07-587f2423850c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.831164] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241350, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064306} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.831579] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.832593] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41dc97d-e691-4bd5-b9d6-3aecbb5fd3ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.857411] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 5badecfd-5784-4968-8519-419a01c67465/5badecfd-5784-4968-8519-419a01c67465.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.857956] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e8ab9c8-9b73-488a-a84c-5444bb460f48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.884295] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 653.884295] env[69994]: value = "task-3241351" [ 653.884295] env[69994]: _type = "Task" [ 653.884295] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.893822] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241351, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.008971] env[69994]: DEBUG nova.compute.manager [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 654.009271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 654.014172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4751cc9-9cc0-4d75-92d0-4b03a5bc7975 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.019378] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241349, 'name': CreateVM_Task, 'duration_secs': 0.597232} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.019651] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.020653] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.020736] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.021060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 654.021320] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-363b7da2-fa1b-4bd6-b634-4a898cb3e1e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.025316] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 654.025884] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e3201bb-d101-4074-8a23-8c2d30e4cdc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.029037] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 654.029037] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bc22a2-a350-eb47-37b6-a8af050b6943" [ 654.029037] env[69994]: _type = "Task" [ 654.029037] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.037134] env[69994]: DEBUG oslo_vmware.api [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 654.037134] env[69994]: value = "task-3241352" [ 654.037134] env[69994]: _type = "Task" [ 654.037134] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.042700] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bc22a2-a350-eb47-37b6-a8af050b6943, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.047799] env[69994]: DEBUG oslo_vmware.api [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241352, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.109456] env[69994]: DEBUG nova.network.neutron [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Updated VIF entry in instance network info cache for port 097f8c85-cd23-443b-8f4a-aae58ce5d392. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 654.110079] env[69994]: DEBUG nova.network.neutron [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Updating instance_info_cache with network_info: [{"id": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "address": "fa:16:3e:c9:7d:47", "network": {"id": "2fcc4ee1-e8f5-4288-9c43-e97a20e8e0e7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-471387069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39d3a367749e4a169ce2ad95e4600d49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap097f8c85-cd", "ovs_interfaceid": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.119047] env[69994]: DEBUG nova.compute.utils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 654.121181] env[69994]: DEBUG nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 654.121555] env[69994]: DEBUG nova.network.neutron [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 654.208854] env[69994]: DEBUG nova.policy [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab7d3e987631424d8164faf2c8488724', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0337cba194454333bff45bdac0d1e371', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 654.307680] env[69994]: DEBUG oslo_concurrency.lockutils [req-9173a4de-e82b-463e-b8d3-e54d4b538ed9 req-f8d9bf30-642a-4dee-9da8-bb4d069bc1d1 service nova] Releasing lock "refresh_cache-2f710439-0216-401e-9759-af584f9bd00d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.395666] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241351, 'name': ReconfigVM_Task, 'duration_secs': 0.366156} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.396066] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 5badecfd-5784-4968-8519-419a01c67465/5badecfd-5784-4968-8519-419a01c67465.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.396716] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe987079-fc32-4c8d-8c47-23c5f6e06969 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.405266] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 654.405266] env[69994]: value = "task-3241353" [ 654.405266] env[69994]: _type = "Task" [ 654.405266] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.413782] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241353, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.462528] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de31bf9-e81b-4e03-b4bf-bfbf9327fd52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.488298] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance 'dbad6bed-64ba-4dfd-abad-c0b2c775ba2c' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 654.545290] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bc22a2-a350-eb47-37b6-a8af050b6943, 'name': SearchDatastore_Task, 'duration_secs': 0.009975} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.545290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.545290] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.545290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.545672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.545910] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.549814] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5967e7da-0945-4802-b7ee-1030488e423b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.552476] env[69994]: 
DEBUG oslo_vmware.api [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241352, 'name': PowerOffVM_Task, 'duration_secs': 0.221175} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.557165] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 654.557402] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 654.559030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0abd1b5-68f4-45bd-8742-223aafe77232 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.564873] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.565746] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.566271] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42bf128-aa37-449d-bae9-60a484761974 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.578807] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 654.578807] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5214aacf-be78-adf4-294a-09a3cc9c1dd2" [ 654.578807] env[69994]: _type = "Task" [ 654.578807] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.590365] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5214aacf-be78-adf4-294a-09a3cc9c1dd2, 'name': SearchDatastore_Task, 'duration_secs': 0.013548} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.593720] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99321f86-d3ae-4188-a965-e13d7dee5338 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.600103] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 654.600103] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521a7b65-4fa0-911e-082b-f599f49e0a85" [ 654.600103] env[69994]: _type = "Task" [ 654.600103] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.608239] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521a7b65-4fa0-911e-082b-f599f49e0a85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.613302] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Releasing lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.613712] env[69994]: DEBUG nova.compute.manager [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Received event network-vif-plugged-641f1973-439b-47b8-a402-9d7a8557e0c2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 654.614031] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Acquiring lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.614272] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.614442] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.614613] env[69994]: DEBUG nova.compute.manager [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] No waiting events found dispatching network-vif-plugged-641f1973-439b-47b8-a402-9d7a8557e0c2 {{(pid=69994) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 654.614781] env[69994]: WARNING nova.compute.manager [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Received unexpected event network-vif-plugged-641f1973-439b-47b8-a402-9d7a8557e0c2 for instance with vm_state building and task_state spawning. [ 654.614945] env[69994]: DEBUG nova.compute.manager [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Received event network-changed-641f1973-439b-47b8-a402-9d7a8557e0c2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 654.615114] env[69994]: DEBUG nova.compute.manager [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Refreshing instance network info cache due to event network-changed-641f1973-439b-47b8-a402-9d7a8557e0c2. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 654.615306] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Acquiring lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.615442] env[69994]: DEBUG oslo_concurrency.lockutils [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Acquired lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.615599] env[69994]: DEBUG nova.network.neutron [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Refreshing network info cache for port 641f1973-439b-47b8-a402-9d7a8557e0c2 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 654.621792] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 654.622791] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 654.622791] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Deleting the datastore file [datastore1] f109c803-bf37-4845-8956-4336dbc8a946 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.622791] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef016024-fb35-4d5a-ab95-676e41052a27 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.628043] env[69994]: DEBUG nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.636018] env[69994]: DEBUG oslo_vmware.api [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for the task: (returnval){ [ 654.636018] env[69994]: value = "task-3241355" [ 654.636018] env[69994]: _type = "Task" [ 654.636018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.650053] env[69994]: DEBUG oslo_vmware.api [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.740863] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1545aaf5-543b-47f6-8537-997544c026ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.750596] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50400139-e679-4400-a31f-4fce4d51cb73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.783538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b437369-5224-428c-ab13-de1df3a30214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.791232] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a034440-91d5-4e37-868a-840cc553de70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.804929] env[69994]: DEBUG nova.compute.provider_tree [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.917147] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241353, 'name': Rename_Task, 'duration_secs': 0.290746} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.917273] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 654.917521] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8411c6ac-54a1-44b7-817f-887b82084e22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.925474] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 654.925474] env[69994]: value = "task-3241356" [ 654.925474] env[69994]: _type = "Task" [ 654.925474] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.934664] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.997991] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 654.997991] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-195b3589-f7c3-4192-a204-e4b98def6d05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.005789] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 655.005789] env[69994]: value = "task-3241357" [ 655.005789] env[69994]: _type = "Task" [ 655.005789] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.015124] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241357, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.015668] env[69994]: DEBUG nova.network.neutron [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Successfully created port: 23ce404a-01b4-4000-91a5-8532d84ccfff {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 655.112632] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521a7b65-4fa0-911e-082b-f599f49e0a85, 'name': SearchDatastore_Task, 'duration_secs': 0.00878} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.112918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.113249] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/c47c26c8-3f7f-436b-95aa-0bd08d41e62b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.113532] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a789284-8717-4523-a89b-7c08c81a6362 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.122367] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 655.122367] env[69994]: value = "task-3241358" [ 655.122367] env[69994]: _type = "Task" [ 655.122367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.135757] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.145717] env[69994]: DEBUG oslo_vmware.api [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Task: {'id': task-3241355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.368783} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.145988] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 655.146269] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 655.146493] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 655.146715] env[69994]: INFO nova.compute.manager [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Took 1.14 seconds to destroy the instance on the hypervisor. [ 655.147010] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 655.147286] env[69994]: DEBUG nova.compute.manager [-] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 655.147395] env[69994]: DEBUG nova.network.neutron [-] [instance: f109c803-bf37-4845-8956-4336dbc8a946] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 655.308531] env[69994]: DEBUG nova.scheduler.client.report [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.419519] env[69994]: DEBUG nova.network.neutron [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Updated VIF entry in instance network info cache for port 641f1973-439b-47b8-a402-9d7a8557e0c2. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 655.419519] env[69994]: DEBUG nova.network.neutron [req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Updating instance_info_cache with network_info: [{"id": "641f1973-439b-47b8-a402-9d7a8557e0c2", "address": "fa:16:3e:11:89:23", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap641f1973-43", "ovs_interfaceid": "641f1973-439b-47b8-a402-9d7a8557e0c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.438384] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.518817] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241357, 'name': PowerOffVM_Task, 'duration_secs': 0.217185} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.519496] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 655.522022] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance 'dbad6bed-64ba-4dfd-abad-c0b2c775ba2c' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 655.635178] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241358, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.639100] env[69994]: DEBUG nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 655.668932] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.669447] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.669652] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.669888] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.669987] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.670150] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.670382] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 655.670520] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 655.670708] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.670837] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.671172] env[69994]: DEBUG nova.virt.hardware [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.672090] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200ff25a-144f-4b0f-a926-aecb5dc7fec5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.684430] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b003d0b8-4b86-4175-bf2c-27a25f9a2248 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.820488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.822107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.668s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.822554] env[69994]: DEBUG nova.objects.instance [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lazy-loading 'resources' on Instance uuid 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 655.841045] env[69994]: INFO nova.scheduler.client.report [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Deleted allocations for instance f3945280-ee10-426b-bcab-3e52e8779c55 [ 655.921920] env[69994]: DEBUG oslo_concurrency.lockutils 
[req-47315e20-0477-4724-901d-1b3e617a5814 req-c37ce52e-6871-41cd-967d-aa696d07e8ef service nova] Releasing lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.937523] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.962754] env[69994]: DEBUG nova.compute.manager [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Received event network-changed-bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 655.962754] env[69994]: DEBUG nova.compute.manager [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Refreshing instance network info cache due to event network-changed-bcae7796-2595-4bff-96c1-d85a7cba05d8. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 655.963058] env[69994]: DEBUG oslo_concurrency.lockutils [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] Acquiring lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.963058] env[69994]: DEBUG oslo_concurrency.lockutils [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] Acquired lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.963179] env[69994]: DEBUG nova.network.neutron [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Refreshing network info cache for port bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.032118] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 656.032118] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 656.032118] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 656.032118] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.032363] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 656.032363] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 656.032363] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 656.032363] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 656.032363] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 656.032543] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 656.032543] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 656.038754] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d12082d-e9ee-4251-a61a-e490e076be9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.055548] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 656.055548] env[69994]: value = "task-3241359" [ 
656.055548] env[69994]: _type = "Task" [ 656.055548] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.065690] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241359, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.091500] env[69994]: INFO nova.compute.manager [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Rebuilding instance [ 656.134981] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241358, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.140901] env[69994]: DEBUG nova.compute.manager [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.141310] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/c47c26c8-3f7f-436b-95aa-0bd08d41e62b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.141588] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.142454] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce9dbcc-5bf2-46f7-b5b8-6ac6a1d056f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.145705] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47d6c918-db71-4ec9-84f2-607f3acb23ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.156087] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 656.156087] env[69994]: value = "task-3241360" [ 656.156087] env[69994]: _type = "Task" [ 656.156087] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.165030] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241360, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.216456] env[69994]: DEBUG nova.network.neutron [-] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.350492] env[69994]: DEBUG oslo_concurrency.lockutils [None req-78c4039c-3cbb-4a63-b2cf-2df8fc79ea48 tempest-ServerDiagnosticsV248Test-103199222 tempest-ServerDiagnosticsV248Test-103199222-project-member] Lock "f3945280-ee10-426b-bcab-3e52e8779c55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.933s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.447348] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.469622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.470194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.569204] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241359, 'name': ReconfigVM_Task, 'duration_secs': 0.238008} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.569882] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance 'dbad6bed-64ba-4dfd-abad-c0b2c775ba2c' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 656.673345] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241360, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069867} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.677146] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.680706] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d0fe7e-b712-4a7c-828f-ef38791d749a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.704937] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/c47c26c8-3f7f-436b-95aa-0bd08d41e62b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.708353] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e7664c4-346d-418a-8b30-2b62e47b4b9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.724790] env[69994]: INFO nova.compute.manager [-] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Took 1.58 seconds to deallocate network for instance. [ 656.735662] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 656.735662] env[69994]: value = "task-3241361" [ 656.735662] env[69994]: _type = "Task" [ 656.735662] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.747755] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241361, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.928393] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a7656c-7ace-4b71-b532-24e31aab11b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.942504] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466a5464-1748-4e22-861f-52e89ea2b930 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.946916] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.977735] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f074beb4-36ca-49bd-af66-5fb2bd81ed67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.989249] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bdc4c5-229e-46d5-9913-f9e14b4cfcde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.005181] env[69994]: DEBUG nova.compute.provider_tree [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.084491] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.084735] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.084886] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.085160] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 
tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.085312] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.085457] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.085656] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.085809] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.086015] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.086463] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.086689] env[69994]: DEBUG nova.virt.hardware [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.095038] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Reconfiguring VM instance instance-00000005 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 657.095781] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d82aa30a-98d4-4c23-b2cc-d795d1fbdf17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.120257] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 657.120257] env[69994]: value = "task-3241362" [ 657.120257] env[69994]: _type = 
"Task" [ 657.120257] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.129352] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.161371] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 657.161709] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-673846b1-53cb-48c8-b73f-a450b6add17a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.164525] env[69994]: DEBUG nova.network.neutron [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Updated VIF entry in instance network info cache for port bcae7796-2595-4bff-96c1-d85a7cba05d8. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 657.164956] env[69994]: DEBUG nova.network.neutron [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Updating instance_info_cache with network_info: [{"id": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "address": "fa:16:3e:32:99:ad", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcae7796-25", "ovs_interfaceid": "bcae7796-2595-4bff-96c1-d85a7cba05d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.171706] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 657.171706] env[69994]: value = "task-3241363" [ 657.171706] env[69994]: _type = "Task" [ 657.171706] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.182040] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241363, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.232045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.250195] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241361, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.446638] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.459769] env[69994]: DEBUG nova.network.neutron [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Successfully updated port: 23ce404a-01b4-4000-91a5-8532d84ccfff {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 657.509024] env[69994]: DEBUG nova.scheduler.client.report [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.631747] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241362, 'name': ReconfigVM_Task, 'duration_secs': 0.183505} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.632083] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Reconfigured VM instance instance-00000005 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 657.632987] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4f5016-8dce-4ead-b5b5-86e5cba5b4fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.656039] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] dbad6bed-64ba-4dfd-abad-c0b2c775ba2c/dbad6bed-64ba-4dfd-abad-c0b2c775ba2c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 657.658995] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b88a388-324c-42bc-afec-72f370274e43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.672375] env[69994]: DEBUG oslo_concurrency.lockutils [req-d67a50a3-d581-44e2-9de9-c96457cc2048 req-e588ee2a-5efd-4f3e-b502-29bc906ce6ca service nova] Releasing lock "refresh_cache-7e7953f7-ed5d-4515-9181-93d343ad772d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.678536] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 657.678536] env[69994]: value = "task-3241364" [ 657.678536] env[69994]: _type = "Task" [ 657.678536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.682038] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241363, 'name': PowerOffVM_Task, 'duration_secs': 0.148355} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.685032] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 657.685328] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 657.686074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6803c14d-69d3-4874-90a7-fec3523d0361 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.694065] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241364, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.696163] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 657.696457] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0bb67e5-fb5b-435d-a7f7-dab6164d0be8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.719948] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 657.720210] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 657.720395] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Deleting the datastore file [datastore2] 21f66039-6292-4d9c-b97d-668d029def24 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 657.720661] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26b8ff2c-4f8d-421c-b9da-e44c30827528 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.727028] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 
tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 657.727028] env[69994]: value = "task-3241366" [ 657.727028] env[69994]: _type = "Task" [ 657.727028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.741309] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.750450] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241361, 'name': ReconfigVM_Task, 'duration_secs': 0.566792} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.751956] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Reconfigured VM instance instance-00000012 to attach disk [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/c47c26c8-3f7f-436b-95aa-0bd08d41e62b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.751956] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a34ebdbc-f8c2-4bd1-a3a7-b8d53658419c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.757297] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 657.757297] env[69994]: value = "task-3241367" [ 657.757297] env[69994]: _type = "Task" [ 657.757297] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.765917] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241367, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.814606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.815299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.943327] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.963931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "refresh_cache-aeb7928a-8307-49e7-b019-a4c674e6369a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.963931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquired lock "refresh_cache-aeb7928a-8307-49e7-b019-a4c674e6369a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.963931] env[69994]: DEBUG nova.network.neutron [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 658.015030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.019909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.774s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.020847] env[69994]: INFO nova.compute.claims [None req-365104fc-2349-4a5e-b173-8c0ad144098e 
tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 658.050051] env[69994]: INFO nova.scheduler.client.report [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Deleted allocations for instance 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b [ 658.192427] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241364, 'name': ReconfigVM_Task, 'duration_secs': 0.286898} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.192751] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Reconfigured VM instance instance-00000005 to attach disk [datastore1] dbad6bed-64ba-4dfd-abad-c0b2c775ba2c/dbad6bed-64ba-4dfd-abad-c0b2c775ba2c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 658.193030] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance 'dbad6bed-64ba-4dfd-abad-c0b2c775ba2c' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 658.227104] env[69994]: DEBUG nova.compute.manager [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Received event network-vif-deleted-08fb6905-cef5-48b8-be29-8244adaf4c18 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 658.227334] env[69994]: DEBUG nova.compute.manager [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Received event network-vif-plugged-23ce404a-01b4-4000-91a5-8532d84ccfff {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 658.227528] env[69994]: DEBUG oslo_concurrency.lockutils [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] Acquiring lock "aeb7928a-8307-49e7-b019-a4c674e6369a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.227728] env[69994]: DEBUG oslo_concurrency.lockutils [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] Lock "aeb7928a-8307-49e7-b019-a4c674e6369a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.228104] env[69994]: DEBUG oslo_concurrency.lockutils [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] Lock 
"aeb7928a-8307-49e7-b019-a4c674e6369a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.228718] env[69994]: DEBUG nova.compute.manager [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] No waiting events found dispatching network-vif-plugged-23ce404a-01b4-4000-91a5-8532d84ccfff {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 658.228943] env[69994]: WARNING nova.compute.manager [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Received unexpected event network-vif-plugged-23ce404a-01b4-4000-91a5-8532d84ccfff for instance with vm_state building and task_state spawning. [ 658.229157] env[69994]: DEBUG nova.compute.manager [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Received event network-changed-23ce404a-01b4-4000-91a5-8532d84ccfff {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 658.229326] env[69994]: DEBUG nova.compute.manager [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Refreshing instance network info cache due to event network-changed-23ce404a-01b4-4000-91a5-8532d84ccfff. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 658.229525] env[69994]: DEBUG oslo_concurrency.lockutils [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] Acquiring lock "refresh_cache-aeb7928a-8307-49e7-b019-a4c674e6369a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.244235] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120358} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.244478] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 658.244656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 658.244822] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 658.269659] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241367, 'name': Rename_Task, 'duration_secs': 0.175028} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.270194] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.270437] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0094ab2d-0587-4013-8865-21d08439a5f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.277393] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 658.277393] env[69994]: value = "task-3241368" [ 658.277393] env[69994]: _type = "Task" [ 658.277393] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.289808] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241368, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.442820] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.528766] env[69994]: DEBUG nova.network.neutron [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.562613] env[69994]: DEBUG oslo_concurrency.lockutils [None req-948c0d24-1dca-4889-8274-7e6f63bd5c70 tempest-ServerDiagnosticsNegativeTest-1585118686 tempest-ServerDiagnosticsNegativeTest-1585118686-project-member] Lock "6ca85dc6-ace9-4c5e-a11e-a3d5060d766b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.366s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.592016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "2f710439-0216-401e-9759-af584f9bd00d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.592334] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "2f710439-0216-401e-9759-af584f9bd00d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.592579] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "2f710439-0216-401e-9759-af584f9bd00d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.592807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "2f710439-0216-401e-9759-af584f9bd00d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.593029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "2f710439-0216-401e-9759-af584f9bd00d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.597442] env[69994]: INFO nova.compute.manager [None req-024ddc30-cada-45d8-8427-0c45b266bd2e 
tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Terminating instance [ 658.701267] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c8b1f9-26d1-4d59-9834-0eb20fe72f9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.721072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6531554-bc54-494d-b9f9-00e5831bbaef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.756942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance 'dbad6bed-64ba-4dfd-abad-c0b2c775ba2c' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 658.788142] env[69994]: DEBUG oslo_vmware.api [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241368, 'name': PowerOnVM_Task, 'duration_secs': 0.46921} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.788480] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.788800] env[69994]: INFO nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Took 8.69 seconds to spawn the instance on the hypervisor. 
[ 658.789108] env[69994]: DEBUG nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.790081] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f5b147-f7ab-46af-a3d3-823f7339a91a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.843189] env[69994]: DEBUG nova.network.neutron [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Updating instance_info_cache with network_info: [{"id": "23ce404a-01b4-4000-91a5-8532d84ccfff", "address": "fa:16:3e:17:49:e3", "network": {"id": "1cbd703c-4210-4eba-82b0-8d9ce1838326", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1943063431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0337cba194454333bff45bdac0d1e371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ce404a-01", "ovs_interfaceid": "23ce404a-01b4-4000-91a5-8532d84ccfff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.949043] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.101553] env[69994]: DEBUG nova.compute.manager [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 659.101772] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 659.103970] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a16bd2f-0d61-4b6f-a063-a82f298a1551 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.114388] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 659.119492] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-066bb3b1-d811-4038-94a3-880dac2ae0ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.125045] env[69994]: DEBUG oslo_vmware.api [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 659.125045] env[69994]: value = "task-3241369" [ 659.125045] env[69994]: _type = "Task" [ 659.125045] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.134170] env[69994]: DEBUG oslo_vmware.api [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241369, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.314824] env[69994]: INFO nova.compute.manager [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Took 28.48 seconds to build instance. 
[ 659.324348] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 659.324348] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.327858] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 659.327858] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.327858] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 659.327858] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 659.328289] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 659.328336] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 659.328590] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Got 1 
possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 659.328666] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 659.328807] env[69994]: DEBUG nova.virt.hardware [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 659.329792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86155052-5ab1-4e32-b88f-1e7892adae3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.342863] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47926d4-cd8c-46b0-a5df-153f3ffbd706 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.350666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Releasing lock "refresh_cache-aeb7928a-8307-49e7-b019-a4c674e6369a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.350981] env[69994]: DEBUG nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Instance network_info: |[{"id": "23ce404a-01b4-4000-91a5-8532d84ccfff", "address": "fa:16:3e:17:49:e3", "network": {"id": "1cbd703c-4210-4eba-82b0-8d9ce1838326", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1943063431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0337cba194454333bff45bdac0d1e371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ce404a-01", "ovs_interfaceid": "23ce404a-01b4-4000-91a5-8532d84ccfff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 659.353723] env[69994]: DEBUG oslo_concurrency.lockutils [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] Acquired lock "refresh_cache-aeb7928a-8307-49e7-b019-a4c674e6369a" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.353911] env[69994]: DEBUG nova.network.neutron [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Refreshing network info cache for port 23ce404a-01b4-4000-91a5-8532d84ccfff {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.355100] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:49:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23ce404a-01b4-4000-91a5-8532d84ccfff', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 659.363601] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Creating folder: Project (0337cba194454333bff45bdac0d1e371). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.365243] env[69994]: DEBUG nova.network.neutron [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Port 1c4ae184-b8b0-409f-aff4-5568af2af1b9 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 659.367678] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c211ebf-d62c-45b5-86ba-35188425babe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.378766] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 659.384610] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.392132] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.392942] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a6fd390-21b7-407b-9950-15df1d33a363 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.407354] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Created folder: Project (0337cba194454333bff45bdac0d1e371) in parent group-v647729. [ 659.407690] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Creating folder: Instances. Parent ref: group-v647784. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.411794] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc46893d-a10c-4651-bb04-6af8c38c78f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.416218] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.416218] env[69994]: value = "task-3241371" [ 659.416218] env[69994]: _type = "Task" [ 659.416218] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.424046] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Created folder: Instances in parent group-v647784. [ 659.424046] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.427149] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.427149] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241371, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.427149] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8511eb09-5823-4072-a85e-0a93de813f43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.454325] env[69994]: DEBUG oslo_vmware.api [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241356, 'name': PowerOnVM_Task, 'duration_secs': 4.50676} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.455646] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.455860] env[69994]: INFO nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Took 12.07 seconds to spawn the instance on the hypervisor. [ 659.456053] env[69994]: DEBUG nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.456310] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.456310] env[69994]: value = "task-3241373" [ 659.456310] env[69994]: _type = "Task" [ 659.456310] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.457059] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12befc80-cada-4d25-9b46-094b127155c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.474687] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241373, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.639027] env[69994]: DEBUG oslo_vmware.api [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241369, 'name': PowerOffVM_Task, 'duration_secs': 0.187061} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.639027] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 659.639314] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 659.639314] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5aa3e1b-236e-4014-9891-0ecadd86381a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.700634] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a87d58-2496-42ef-9960-c179e752bcfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.710592] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9713e8f0-7325-43a0-8f07-2ebc499ef83c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.717139] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 659.717139] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 659.717139] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Deleting the datastore file [datastore2] 2f710439-0216-401e-9759-af584f9bd00d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 659.717139] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7572cf3-024c-45f3-8fb2-b455f9a333db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.750037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14df2017-7653-4ad2-bf37-a44c7296fe88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.753610] env[69994]: DEBUG oslo_vmware.api [None req-024ddc30-cada-45d8-8427-0c45b266bd2e 
tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for the task: (returnval){ [ 659.753610] env[69994]: value = "task-3241375" [ 659.753610] env[69994]: _type = "Task" [ 659.753610] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.761404] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fd49d3-6502-4dda-adf6-ded4dd8b6067 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.768748] env[69994]: DEBUG oslo_vmware.api [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.779752] env[69994]: DEBUG nova.compute.provider_tree [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.816443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0aa49e10-4d50-40d9-9c11-900569bc17d6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.826s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.926340] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241371, 'name': CreateVM_Task, 'duration_secs': 0.354911} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.926501] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.926908] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.927089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.927449] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 659.927718] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a9e8a5e-2b8c-4a45-af1f-fc8591926adc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.935976] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 659.935976] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521adf21-0b63-6418-70fa-8f664791ad8c" [ 659.935976] env[69994]: _type = "Task" [ 659.935976] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.945242] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521adf21-0b63-6418-70fa-8f664791ad8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.968737] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241373, 'name': CreateVM_Task, 'duration_secs': 0.411515} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.969240] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.969595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.983929] env[69994]: INFO nova.compute.manager [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Took 33.13 seconds to build instance. [ 660.263685] env[69994]: DEBUG oslo_vmware.api [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Task: {'id': task-3241375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.422167} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.264076] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.264167] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 660.264354] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.264550] env[69994]: INFO nova.compute.manager [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 660.264788] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.264970] env[69994]: DEBUG nova.compute.manager [-] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 660.265076] env[69994]: DEBUG nova.network.neutron [-] [instance: 2f710439-0216-401e-9759-af584f9bd00d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.283227] env[69994]: DEBUG nova.scheduler.client.report [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.319127] env[69994]: DEBUG nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.401728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.401955] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.402144] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.448819] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521adf21-0b63-6418-70fa-8f664791ad8c, 'name': SearchDatastore_Task, 'duration_secs': 0.043508} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.449326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.449643] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.449804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.449977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.450254] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.453019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.453019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 660.453019] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acd02b62-516c-44ef-8d2a-8a065c6be183 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.455571] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c59b953-eafb-41f7-937e-b8a620ad2733 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.463559] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 
tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 660.463559] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528fa84f-c54e-461e-beb4-af1a57abb290" [ 660.463559] env[69994]: _type = "Task" [ 660.463559] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.465761] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.465980] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 660.470089] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9e118c-e9d5-4e32-9b36-a08f31db104b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.478369] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528fa84f-c54e-461e-beb4-af1a57abb290, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.480360] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 660.480360] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522d0107-decf-d817-cfdd-b80731955a60" [ 660.480360] env[69994]: _type = "Task" [ 660.480360] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.488319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1ea9e9b5-1617-4c60-8764-4374369f6ada tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.878s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.488637] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522d0107-decf-d817-cfdd-b80731955a60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.791740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.773s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.792042] env[69994]: DEBUG nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.802088] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.080s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.802088] env[69994]: INFO nova.compute.claims [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.816944] env[69994]: DEBUG nova.network.neutron [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Updated VIF entry in instance network info cache for port 23ce404a-01b4-4000-91a5-8532d84ccfff. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 660.817103] env[69994]: DEBUG nova.network.neutron [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Updating instance_info_cache with network_info: [{"id": "23ce404a-01b4-4000-91a5-8532d84ccfff", "address": "fa:16:3e:17:49:e3", "network": {"id": "1cbd703c-4210-4eba-82b0-8d9ce1838326", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1943063431-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0337cba194454333bff45bdac0d1e371", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23ce404a-01", "ovs_interfaceid": "23ce404a-01b4-4000-91a5-8532d84ccfff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.863156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.976823] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528fa84f-c54e-461e-beb4-af1a57abb290, 'name': SearchDatastore_Task, 'duration_secs': 0.01603} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.977177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.977347] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.977574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.991277] env[69994]: DEBUG nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.997648] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522d0107-decf-d817-cfdd-b80731955a60, 'name': SearchDatastore_Task, 'duration_secs': 0.019769} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.998563] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c805a0d-017d-4ce4-964e-a14e7e5b04ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.005737] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 661.005737] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e7838-1ba7-1ee9-0f63-9b64c0ce3bb3" [ 661.005737] env[69994]: _type = "Task" [ 661.005737] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.021604] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524e7838-1ba7-1ee9-0f63-9b64c0ce3bb3, 'name': SearchDatastore_Task, 'duration_secs': 0.012331} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.021762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.022084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 661.022658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.022658] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.027019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38cb6abb-5fc8-43a4-95fd-a7f6ad3d0919 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.027019] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a0e07fd-627e-4f7e-9d82-fe7ec509dd25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.035898] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 661.035898] env[69994]: value = "task-3241376" [ 661.035898] env[69994]: _type = "Task" [ 661.035898] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.036106] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.036441] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 661.043568] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c434159a-4664-460c-96cf-2987c22986fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.055137] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.055760] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 661.055760] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5279ea7a-a5cf-1744-7941-5ec2687f4d39" [ 661.055760] env[69994]: _type = "Task" [ 661.055760] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.066953] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5279ea7a-a5cf-1744-7941-5ec2687f4d39, 'name': SearchDatastore_Task, 'duration_secs': 0.010982} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.068752] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9374f664-50a0-4e11-a6fb-f0009ce80938 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.076420] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 661.076420] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c97880-718f-9e45-e394-59d7e05fb2ba" [ 661.076420] env[69994]: _type = "Task" [ 661.076420] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.084751] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c97880-718f-9e45-e394-59d7e05fb2ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.313856] env[69994]: DEBUG nova.compute.utils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 661.322659] env[69994]: DEBUG nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 661.323469] env[69994]: DEBUG nova.network.neutron [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 661.325927] env[69994]: DEBUG oslo_concurrency.lockutils [req-21a7bde1-56a2-4327-a1ff-9d03c8dc2f1e req-f56c8cb6-7a17-417e-8af1-33341b01b2fa service nova] Releasing lock "refresh_cache-aeb7928a-8307-49e7-b019-a4c674e6369a" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.459228] env[69994]: DEBUG nova.policy [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bdb2acbbdc04bec8c21d69912b45f43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97b5a4565fa644a4a510beb5ba006afb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 661.517518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.517518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.517665] env[69994]: DEBUG nova.network.neutron [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.525349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 
tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.550801] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241376, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.592502] env[69994]: DEBUG nova.network.neutron [-] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.594390] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c97880-718f-9e45-e394-59d7e05fb2ba, 'name': SearchDatastore_Task, 'duration_secs': 0.009966} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.594390] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.594558] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] aeb7928a-8307-49e7-b019-a4c674e6369a/aeb7928a-8307-49e7-b019-a4c674e6369a.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 661.594962] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dc895fd-8246-4d93-a725-1d3d0518625c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.609478] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 661.609478] env[69994]: value = "task-3241377" [ 661.609478] env[69994]: _type = "Task" [ 661.609478] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.622294] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241377, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.825722] env[69994]: DEBUG nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 662.013903] env[69994]: DEBUG nova.network.neutron [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Successfully created port: d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.053417] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241376, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614893} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.055320] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 662.055472] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 662.057105] env[69994]: DEBUG nova.compute.manager [req-1e4f9297-2838-4ffb-bb27-e8248f71dfa1 req-b633b2c2-3efa-4ccc-a0f2-a5ee2dd3d6ad service nova] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Received event network-vif-deleted-b3347b62-0c9c-4b6c-8d07-587f2423850c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.060968] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-329252d7-7689-4721-8a4a-b24cf9b8531d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.073387] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 662.073387] env[69994]: value = "task-3241378" [ 662.073387] env[69994]: _type = "Task" [ 662.073387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.085384] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241378, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.103766] env[69994]: INFO nova.compute.manager [-] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Took 1.84 seconds to deallocate network for instance. [ 662.129096] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241377, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.198720] env[69994]: INFO nova.compute.manager [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Rescuing [ 662.198720] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.198844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.198997] env[69994]: DEBUG nova.network.neutron [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 662.483797] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32813ddb-ff93-40a7-9ce2-b3d44dba77ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.491985] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b64b0c-8f57-4f46-b16d-261e248c06ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.525237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8191040e-eae4-4d54-be26-dae69396b2b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.535417] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c254fb-afd5-425b-84ad-3f7854cc0ecd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.553506] env[69994]: DEBUG nova.compute.provider_tree [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.582482] env[69994]: DEBUG oslo_vmware.api [None 
req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241378, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162304} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.582990] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.583670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb58a07-7925-43db-8ed0-daaa54d5ea07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.606105] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.606648] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-015d2dcb-99f1-49bb-8eb3-fa207123af39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.624838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.633984] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55986} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.635863] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] aeb7928a-8307-49e7-b019-a4c674e6369a/aeb7928a-8307-49e7-b019-a4c674e6369a.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 662.635863] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 662.636152] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 662.636152] env[69994]: value = "task-3241379" [ 662.636152] env[69994]: _type = "Task" [ 662.636152] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.636338] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb0c2fe1-0542-4e1d-b3a1-7b466b717460 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.647536] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 662.647536] env[69994]: value = "task-3241380" [ 662.647536] env[69994]: _type = "Task" [ 662.647536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.651384] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241379, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.661915] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241380, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.672898] env[69994]: DEBUG nova.network.neutron [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance_info_cache with network_info: [{"id": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "address": "fa:16:3e:23:d3:c2", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c4ae184-b8", "ovs_interfaceid": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.840153] env[69994]: DEBUG nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 662.847182] env[69994]: DEBUG nova.compute.manager [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.848086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4823bc5c-ad07-4092-9bdf-29330f0bcf87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.876948] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.878041] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.878041] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.878041] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.878041] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.878399] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.878815] env[69994]: DEBUG nova.virt.hardware [None 
req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.880112] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 662.880112] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.880112] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.880112] env[69994]: DEBUG nova.virt.hardware [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.881187] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583ffec9-9918-434d-aa4e-1b807f41c85e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.891241] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e02cd6-1538-4092-b117-717378e1b570 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.061608] env[69994]: DEBUG nova.scheduler.client.report [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 663.127264] env[69994]: DEBUG nova.network.neutron [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Updating instance_info_cache with network_info: [{"id": "641f1973-439b-47b8-a402-9d7a8557e0c2", "address": "fa:16:3e:11:89:23", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": 
"tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap641f1973-43", "ovs_interfaceid": "641f1973-439b-47b8-a402-9d7a8557e0c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.153766] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241379, 'name': ReconfigVM_Task, 'duration_secs': 0.345529} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.155429] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 21f66039-6292-4d9c-b97d-668d029def24/21f66039-6292-4d9c-b97d-668d029def24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.156427] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4aa79728-60f2-47de-b156-c4a8df85c489 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.164347] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082032} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.166419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 663.168037] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 663.168037] env[69994]: value = "task-3241381" [ 663.168037] env[69994]: _type = "Task" [ 663.168037] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.168037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345eabf5-46c3-425a-a753-7d25d8e9094b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.175285] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.213479] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241381, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.217930] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] aeb7928a-8307-49e7-b019-a4c674e6369a/aeb7928a-8307-49e7-b019-a4c674e6369a.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 663.218641] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6c15864-1c93-4d31-afc8-6c6702b63e38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.244659] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 663.244659] env[69994]: value = "task-3241382" [ 663.244659] env[69994]: _type = "Task" [ 663.244659] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.255174] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241382, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.363151] env[69994]: INFO nova.compute.manager [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] instance snapshotting [ 663.366646] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8ff395-c2d1-40ff-a807-94612583de1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.392863] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c46a938-e760-4cb0-b9c0-592d733afa41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.572352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.575894] env[69994]: DEBUG nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 663.583654] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.813s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.587922] env[69994]: INFO nova.compute.claims [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.618622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "5badecfd-5784-4968-8519-419a01c67465" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.622020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.622020] env[69994]: INFO nova.compute.manager [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 
tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Rebooting instance [ 663.628507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "refresh_cache-c47c26c8-3f7f-436b-95aa-0bd08d41e62b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.686575] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241381, 'name': Rename_Task, 'duration_secs': 0.167518} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.686575] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 663.686575] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d46e3af7-23eb-4f8b-aa33-b463ef7498a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.695604] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Waiting for the task: (returnval){ [ 663.695604] env[69994]: value = "task-3241383" [ 663.695604] env[69994]: _type = "Task" [ 663.695604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.713851] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241383, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.745171] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2568e7b0-072f-4ebb-a9ee-3aced5c59236 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.773923] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241382, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.774983] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b1179b-f2c2-4df3-993c-c026df301aaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.783243] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance 'dbad6bed-64ba-4dfd-abad-c0b2c775ba2c' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 663.904931] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 663.906562] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-851ba04a-827f-4688-8de3-e410b0e98d66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.916059] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 663.916059] env[69994]: value = "task-3241384" [ 663.916059] env[69994]: _type = "Task" [ 663.916059] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.929750] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241384, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.002278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "744fe018-d12c-44c2-98f1-c11fbfffc98e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.002672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.093413] env[69994]: DEBUG nova.compute.utils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 664.098020] env[69994]: DEBUG nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 664.098020] env[69994]: DEBUG nova.network.neutron [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 664.162320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.162520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquired lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.162695] env[69994]: DEBUG nova.network.neutron [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.214770] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241383, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.236656] env[69994]: DEBUG nova.policy [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f2b4659f30f4b9db4627d3d3abb6ba5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '605d72502cc644bfa4d875bf348246de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 664.260500] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241382, 'name': ReconfigVM_Task, 'duration_secs': 0.57526} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.260912] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Reconfigured VM instance instance-00000013 to attach disk [datastore2] aeb7928a-8307-49e7-b019-a4c674e6369a/aeb7928a-8307-49e7-b019-a4c674e6369a.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 664.261646] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc7c2535-4ce2-4016-a499-b7c3a590bd28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.268742] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 664.268742] env[69994]: value = "task-3241385" [ 664.268742] env[69994]: _type = "Task" [ 664.268742] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.277991] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241385, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.292705] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 664.293089] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b0dde87-7cf1-43c1-8c1f-9dcbba9dc2ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.300282] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 664.300282] env[69994]: value = "task-3241386" [ 664.300282] env[69994]: _type = "Task" [ 664.300282] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.315216] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241386, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.433073] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241384, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.600866] env[69994]: DEBUG nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 664.709714] env[69994]: DEBUG oslo_vmware.api [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Task: {'id': task-3241383, 'name': PowerOnVM_Task, 'duration_secs': 0.637532} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.710102] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 664.710319] env[69994]: DEBUG nova.compute.manager [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.711105] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865c8369-ae3d-43c3-ade9-c1c02646e84b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.783949] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241385, 'name': Rename_Task, 'duration_secs': 0.19163} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.786689] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 664.792148] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57d7c90a-05b3-438c-bd1e-b8642babf53d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.795750] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 664.795750] env[69994]: value = "task-3241387" [ 664.795750] env[69994]: _type = "Task" [ 664.795750] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.804144] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241387, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.818898] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241386, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.931211] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241384, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.012212] env[69994]: DEBUG nova.network.neutron [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Updating instance_info_cache with network_info: [{"id": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "address": "fa:16:3e:c9:7d:47", "network": {"id": "2fcc4ee1-e8f5-4288-9c43-e97a20e8e0e7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-471387069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39d3a367749e4a169ce2ad95e4600d49", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap097f8c85-cd", "ovs_interfaceid": "097f8c85-cd23-443b-8f4a-aae58ce5d392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.049469] env[69994]: DEBUG nova.network.neutron [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Successfully updated port: d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.178165] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.178679] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d578efe-4e7d-4b3d-986e-d887e10ad6aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.184067] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32868d93-5b1d-4a57-9a11-72c771f9dd0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.189251] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 665.189251] env[69994]: value = "task-3241388" [ 665.189251] env[69994]: _type = "Task" [ 665.189251] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.196170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a44895f-a37b-4793-afce-bd7eae90313a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.202769] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241388, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.237695] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feed12a6-d71b-4af7-9f39-a53bdf6bc2a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.243927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.246518] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fb7b19-dc9a-4d0a-b02c-2320ce6e203e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.262489] env[69994]: DEBUG nova.compute.provider_tree [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.308702] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241387, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.315867] env[69994]: DEBUG oslo_vmware.api [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241386, 'name': PowerOnVM_Task, 'duration_secs': 0.742289} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.316144] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 665.316337] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2214b1-a6cd-4186-840e-0b4d5d9a8319 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance 'dbad6bed-64ba-4dfd-abad-c0b2c775ba2c' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 665.411928] env[69994]: DEBUG nova.compute.manager [req-7b14402d-cb3a-4513-9bd0-f58f23d799f0 req-1a4e51d4-3372-4d04-846d-e1d5004d92ad service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received event network-vif-plugged-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.412206] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b14402d-cb3a-4513-9bd0-f58f23d799f0 req-1a4e51d4-3372-4d04-846d-e1d5004d92ad service nova] Acquiring lock "1d548f54-4ffa-4299-9212-717350558ad4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.412370] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b14402d-cb3a-4513-9bd0-f58f23d799f0 req-1a4e51d4-3372-4d04-846d-e1d5004d92ad service nova] Lock "1d548f54-4ffa-4299-9212-717350558ad4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.412539] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b14402d-cb3a-4513-9bd0-f58f23d799f0 req-1a4e51d4-3372-4d04-846d-e1d5004d92ad service nova] Lock "1d548f54-4ffa-4299-9212-717350558ad4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.412702] env[69994]: DEBUG nova.compute.manager [req-7b14402d-cb3a-4513-9bd0-f58f23d799f0 req-1a4e51d4-3372-4d04-846d-e1d5004d92ad service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] No waiting events found dispatching network-vif-plugged-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.412862] env[69994]: WARNING nova.compute.manager [req-7b14402d-cb3a-4513-9bd0-f58f23d799f0 req-1a4e51d4-3372-4d04-846d-e1d5004d92ad service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received unexpected event network-vif-plugged-d451c9f0-137b-44de-a79c-ec92c6f843bc for instance with vm_state building and task_state spawning. [ 665.430183] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241384, 'name': CreateSnapshot_Task, 'duration_secs': 1.20996} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.430486] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 665.431295] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac3e111-ac53-4d33-aaa9-caf7a06ea3d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.500235] env[69994]: DEBUG nova.network.neutron [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Successfully created port: 003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.516763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Releasing lock "refresh_cache-5badecfd-5784-4968-8519-419a01c67465" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.553936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.554326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.554547] env[69994]: DEBUG nova.network.neutron [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.627655] env[69994]: DEBUG nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 665.658722] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 665.659692] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.659870] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.660203] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.660385] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.660610] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 665.660865] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 665.661109] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 665.661326] env[69994]: DEBUG 
nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 665.664322] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 665.664543] env[69994]: DEBUG nova.virt.hardware [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 665.665436] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b8965e-0873-49b3-a94a-bd48d866422d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.675328] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3f9fbd-7e4a-4092-a393-5d93fc79a40f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.702024] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241388, 'name': PowerOffVM_Task, 'duration_secs': 0.20422} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.702294] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 665.703083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c5fea3-3159-4fbe-a8fc-0806564580c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.722223] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd115c74-4186-47cc-9873-3e6ddff32664 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.755603] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.755910] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52326bd3-36ee-4a6b-be5b-9aef562779dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.763997] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 665.763997] env[69994]: value = "task-3241389" [ 665.763997] env[69994]: _type = "Task" [ 665.763997] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.768178] env[69994]: DEBUG nova.scheduler.client.report [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 665.776826] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 665.777042] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.777316] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.777467] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.777647] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.777886] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d8a1691-f94e-43b3-9d75-0d9973dd5c12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.785771] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.785970] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.786687] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30f0f0ff-0728-4656-8bf5-809a27dd6ab4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.792657] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 665.792657] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5225f09f-70ce-e262-9b43-96fd5b822cc4" [ 665.792657] env[69994]: _type = "Task" [ 665.792657] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.809164] env[69994]: DEBUG oslo_vmware.api [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241387, 'name': PowerOnVM_Task, 'duration_secs': 0.755787} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.809617] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5225f09f-70ce-e262-9b43-96fd5b822cc4, 'name': SearchDatastore_Task, 'duration_secs': 0.010708} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.809977] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 665.809977] env[69994]: INFO nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Took 10.17 seconds to spawn the instance on the hypervisor. 
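Editor's note: the PowerOnVM_Task, PowerOffVM_Task and CopyVirtualDisk_Task entries above all follow the same oslo.vmware pattern: the driver invokes an asynchronous vCenter task method, then wait_for_task polls it (the "progress is N%" lines come from _poll_task) until it completes and the duration_secs is logged. A minimal sketch of that pattern follows; the host, credentials and VM moref are placeholders, not values taken from this log, and the exact call signatures should be treated as illustrative.

    from oslo_vmware import api, vim_util

    # Placeholder vCenter endpoint/credentials; a real deployment reads these
    # from the [vmware] section of nova.conf.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed-object reference for a VM.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous vCenter task, then block while oslo.vmware polls
    # it -- each poll produces a "progress is N%" DEBUG line like those above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)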
[ 665.810238] env[69994]: DEBUG nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 665.811731] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e36e65-9c58-42ad-8f22-3cf600a4373c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.815280] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d1e4f6c-6ba8-43df-be31-9fcb91dc8672 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.824751] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 665.824751] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521622be-ef8a-e344-a023-10e151082969" [ 665.824751] env[69994]: _type = "Task" [ 665.824751] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.841649] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521622be-ef8a-e344-a023-10e151082969, 'name': SearchDatastore_Task, 'duration_secs': 0.00939} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.841909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.842279] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 665.843345] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-299ef4b2-e86c-46ee-8651-679fb9db305b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.853957] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 665.853957] env[69994]: value = "task-3241390" [ 665.853957] env[69994]: _type = "Task" [ 665.853957] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.863576] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241390, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.953041] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 665.953366] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-57939ece-06e4-4279-ae7a-24183395aac2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.962596] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 665.962596] env[69994]: value = "task-3241391" [ 665.962596] env[69994]: _type = "Task" [ 665.962596] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.973380] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241391, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.021918] env[69994]: DEBUG nova.compute.manager [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 666.022808] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9ea09a-ed73-4842-b342-9e556bc3b9c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.146314] env[69994]: DEBUG nova.network.neutron [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.274893] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.691s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.275563] env[69994]: DEBUG nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 666.281024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.505s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.281024] env[69994]: INFO nova.compute.claims [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 666.347258] env[69994]: INFO nova.compute.manager [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Took 31.13 seconds to build instance. [ 666.367982] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507079} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.368505] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. 
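Editor's note: the "Acquiring lock ... by ...", "acquired ... waited N s" and "released ... held N s" triplets (for example the compute_resources lock released above after 2.691s, or the devstack-image-cache_base datastore lock) are emitted by oslo.concurrency's lockutils wrapper. A minimal sketch of the two usual forms, with hypothetical lock names:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Critical section: only one caller holding this lock name runs at a
        # time; entry and exit produce the acquired/released DEBUG lines above.
        pass

    instance_claim()

    # The same primitive as a context manager, e.g. for per-instance cache locks.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass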
[ 666.369479] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca6231c-eb2d-4232-b4f5-a90621ff19be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.402756] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 666.406443] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22997f86-b664-4a45-81c0-2c2d52012cff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.430761] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 666.430761] env[69994]: value = "task-3241392" [ 666.430761] env[69994]: _type = "Task" [ 666.430761] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.442637] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241392, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.447881] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.448122] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.473892] env[69994]: DEBUG nova.network.neutron [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [{"id": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "address": "fa:16:3e:c1:68:d9", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd451c9f0-13", "ovs_interfaceid": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.475281] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241391, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.787524] env[69994]: DEBUG nova.compute.utils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 666.791716] env[69994]: DEBUG nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 666.791899] env[69994]: DEBUG nova.network.neutron [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 666.850925] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ee84bce-377a-4267-b796-b8c8035c7a38 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "aeb7928a-8307-49e7-b019-a4c674e6369a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.329s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.941749] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241392, 'name': ReconfigVM_Task, 'duration_secs': 0.351106} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.943282] env[69994]: DEBUG nova.policy [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fe6552e09b9e4afa96aa8d1960046951', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbd311dd96cb4ab5b0b75bf914842ce8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 666.945058] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Reconfigured VM instance instance-00000012 to attach disk [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.945750] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77720e93-aa09-4abc-9bef-f2b4d07c7201 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.977057] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8c936ca-f087-4720-ad43-e26d3e9076d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.985808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Releasing lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.986141] env[69994]: DEBUG nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Instance network_info: |[{"id": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "address": "fa:16:3e:c1:68:d9", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapd451c9f0-13", "ovs_interfaceid": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 666.986993] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:68:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd451c9f0-137b-44de-a79c-ec92c6f843bc', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 666.994615] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Creating folder: Project (97b5a4565fa644a4a510beb5ba006afb). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 666.999245] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4008868-14f4-4acf-9df8-f47b2c66e961 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.001248] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241391, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.002894] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 667.002894] env[69994]: value = "task-3241393" [ 667.002894] env[69994]: _type = "Task" [ 667.002894] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.011307] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241393, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.012621] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Created folder: Project (97b5a4565fa644a4a510beb5ba006afb) in parent group-v647729. [ 667.012837] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Creating folder: Instances. Parent ref: group-v647790. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.013108] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9311ff79-9594-4d6b-8cb7-2fbf863f31e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.022268] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Created folder: Instances in parent group-v647790. [ 667.022566] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.022768] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.022981] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ecc7b04-9093-458a-9f95-bd8fd8e8778d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.039908] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb508bb4-69f2-4db5-9a54-1b42a0957340 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.044298] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.044298] env[69994]: value = "task-3241396" [ 667.044298] env[69994]: _type = "Task" [ 667.044298] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.050547] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Doing hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 667.054031] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-33b91aa5-5bb5-4ec5-9949-5e093c23dc8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.056077] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241396, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.060661] env[69994]: DEBUG oslo_vmware.api [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 667.060661] env[69994]: value = "task-3241397" [ 667.060661] env[69994]: _type = "Task" [ 667.060661] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.071016] env[69994]: DEBUG oslo_vmware.api [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241397, 'name': ResetVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.275519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "21f66039-6292-4d9c-b97d-668d029def24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.275804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "21f66039-6292-4d9c-b97d-668d029def24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.275985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "21f66039-6292-4d9c-b97d-668d029def24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.276179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "21f66039-6292-4d9c-b97d-668d029def24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.276349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "21f66039-6292-4d9c-b97d-668d029def24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.278943] env[69994]: INFO nova.compute.manager [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Terminating instance [ 667.293546] env[69994]: DEBUG nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 667.355064] env[69994]: DEBUG nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 667.486955] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241391, 'name': CloneVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.516000] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241393, 'name': ReconfigVM_Task, 'duration_secs': 0.255309} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.519061] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.519740] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79dd86ff-509b-458a-b1b2-9a93b2d7e0c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.529026] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 667.529026] env[69994]: value = "task-3241398" [ 667.529026] env[69994]: _type = "Task" [ 667.529026] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.545288] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241398, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.560384] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241396, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.572159] env[69994]: DEBUG oslo_vmware.api [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241397, 'name': ResetVM_Task, 'duration_secs': 0.110442} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.572314] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Did hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 667.572507] env[69994]: DEBUG nova.compute.manager [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.573336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b347fadb-b3e3-496a-9af7-c6a47c2bbca6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.703999] env[69994]: DEBUG nova.network.neutron [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Successfully created port: 68ff6b46-787c-4682-8626-0a9c421f1cd7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.785388] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "refresh_cache-21f66039-6292-4d9c-b97d-668d029def24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.785968] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquired lock "refresh_cache-21f66039-6292-4d9c-b97d-668d029def24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.786595] env[69994]: DEBUG nova.network.neutron [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.888116] env[69994]: DEBUG nova.network.neutron [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Successfully updated port: 003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.890358] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.969222] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e28502-6a88-454e-b869-e237d7a03b2e {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.986882] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cecff51-3756-456d-8b5c-37dc28063b5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.990439] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241391, 'name': CloneVM_Task, 'duration_secs': 1.532878} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.990723] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Created linked-clone VM from snapshot [ 667.992493] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47573258-3add-47ae-80c7-1500f0854383 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.023135] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe2ba16-db08-4321-b79f-00efd685cf47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.034381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.034541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.035371] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Uploading image 53c9deab-5f2f-487e-ae2e-21e934e22dd5 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 668.046837] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9523a6ad-bbb6-4715-b172-0ca6e14407f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.053968] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241398, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.054985] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 668.058366] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0683a4c5-c3d7-4dec-ac9f-c6639fb86ece {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.069400] env[69994]: DEBUG nova.compute.provider_tree [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.075128] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 668.075128] env[69994]: value = "task-3241399" [ 668.075128] env[69994]: _type = "Task" [ 668.075128] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.075403] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241396, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.086613] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241399, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.087676] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f97d01bd-00e7-4500-967f-70f38331d4cf tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.469s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.311809] env[69994]: DEBUG nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 668.317832] env[69994]: DEBUG nova.network.neutron [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.346301] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 668.346553] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 668.346707] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 668.346885] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 668.347042] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 668.347222] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 668.347453] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 668.347638] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 668.347823] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 668.347975] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 668.348169] env[69994]: DEBUG nova.virt.hardware [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 668.349058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d863a033-0669-4106-b786-514e05fd14d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.357443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297d2ae0-f297-4657-9bf5-677c3fe5ff17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.390996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.391183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.391347] env[69994]: DEBUG nova.network.neutron [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.416150] env[69994]: DEBUG nova.network.neutron [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.543778] env[69994]: DEBUG oslo_vmware.api [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241398, 'name': PowerOnVM_Task, 'duration_secs': 0.593787} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.544169] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 668.548095] env[69994]: DEBUG nova.compute.manager [None req-85da5237-1c14-46fb-bab6-32f2e1b6a8ea tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.549285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d857789-0b43-4281-b7b2-b127add3e405 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.567196] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241396, 'name': CreateVM_Task, 'duration_secs': 1.472229} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.567385] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.568903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.568903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.568903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.568903] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f5b4bc3-324b-4ad7-9776-bdf5e5985301 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.574564] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 668.574564] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526a6cb8-567a-04ed-a4bc-3a83ba3e4eb9" [ 668.574564] env[69994]: _type = "Task" [ 668.574564] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.580384] env[69994]: DEBUG nova.scheduler.client.report [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 668.593364] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526a6cb8-567a-04ed-a4bc-3a83ba3e4eb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.596850] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241399, 'name': Destroy_Task, 'duration_secs': 0.462935} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.597861] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Destroyed the VM [ 668.598349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 668.598617] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-94cc7aeb-3bd6-4c8a-bc84-e25539dd5208 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.607160] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 668.607160] env[69994]: value = "task-3241400" [ 668.607160] env[69994]: _type = "Task" [ 668.607160] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.618731] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241400, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.633504] env[69994]: DEBUG nova.compute.manager [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 668.633504] env[69994]: DEBUG nova.compute.manager [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing instance network info cache due to event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 668.633504] env[69994]: DEBUG oslo_concurrency.lockutils [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] Acquiring lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.633504] env[69994]: DEBUG oslo_concurrency.lockutils [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] Acquired lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.633504] env[69994]: DEBUG nova.network.neutron [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 668.921698] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Releasing lock "refresh_cache-21f66039-6292-4d9c-b97d-668d029def24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.921698] env[69994]: DEBUG nova.compute.manager [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 668.921698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.921698] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1051586f-787c-4f21-82ef-10da201f1e08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.931648] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.932208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b0ab10f-cf54-434d-a10f-47510c2a30de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.940203] env[69994]: DEBUG oslo_vmware.api [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 668.940203] env[69994]: value = "task-3241401" [ 668.940203] env[69994]: _type = "Task" [ 668.940203] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.950178] env[69994]: DEBUG oslo_vmware.api [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241401, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.954963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "5f672fd4-b96f-4506-aa1e-96692a00cb43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.955381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.963112] env[69994]: DEBUG nova.network.neutron [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.088050] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526a6cb8-567a-04ed-a4bc-3a83ba3e4eb9, 'name': SearchDatastore_Task, 'duration_secs': 0.029432} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.088050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.088050] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.088050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.088392] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.088392] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.088392] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22504256-ba48-4748-b8a0-b32c4bb6555c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.091402] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.813s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.091984] env[69994]: DEBUG nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 
tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 669.094799] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.176s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.095157] env[69994]: DEBUG nova.objects.instance [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 669.109659] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.109865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 669.114766] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b19d188-8a66-4af6-99d8-4b25e3c3ad92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.121903] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241400, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.123339] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 669.123339] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ef8c4e-4077-3941-3683-fa5227b48ffc" [ 669.123339] env[69994]: _type = "Task" [ 669.123339] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.132595] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ef8c4e-4077-3941-3683-fa5227b48ffc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.164059] env[69994]: DEBUG nova.compute.manager [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 669.164954] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede0c803-042d-4e81-9501-8e00c5ace02b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.391788] env[69994]: DEBUG nova.network.neutron [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updated VIF entry in instance network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 669.392168] env[69994]: DEBUG nova.network.neutron [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [{"id": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "address": "fa:16:3e:c1:68:d9", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd451c9f0-13", "ovs_interfaceid": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.456489] env[69994]: DEBUG oslo_vmware.api [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241401, 'name': PowerOffVM_Task, 'duration_secs': 0.148907} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.456489] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 669.456489] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 669.456489] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bded729-8705-4462-a5d3-bd4d3cd8acb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.484130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 669.484422] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 669.484663] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Deleting the datastore file [datastore2] 21f66039-6292-4d9c-b97d-668d029def24 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 669.485249] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-386fc8b7-18c4-4b3d-b0b0-6d02d27b4627 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.491205] env[69994]: DEBUG oslo_vmware.api [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for the task: (returnval){ [ 669.491205] env[69994]: value = "task-3241403" [ 669.491205] env[69994]: _type = "Task" [ 669.491205] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.503193] env[69994]: DEBUG oslo_vmware.api [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241403, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.538097] env[69994]: DEBUG nova.network.neutron [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.602582] env[69994]: DEBUG nova.compute.utils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 669.610155] env[69994]: DEBUG nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 669.610155] env[69994]: DEBUG nova.network.neutron [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 669.627711] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241400, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.638426] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ef8c4e-4077-3941-3683-fa5227b48ffc, 'name': SearchDatastore_Task, 'duration_secs': 0.014419} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.639464] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a475c59-823c-447a-bef5-b23ef609f6c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.645727] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 669.645727] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522227e7-f4a5-e6de-97c6-75c2c1f9adfb" [ 669.645727] env[69994]: _type = "Task" [ 669.645727] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.659209] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522227e7-f4a5-e6de-97c6-75c2c1f9adfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.678448] env[69994]: INFO nova.compute.manager [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] instance snapshotting [ 669.681532] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8307fd1c-e3c4-4bf5-8e64-2537b7d58e9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.702528] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfd3873-5b91-4b34-8361-64bc815ef5d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.725017] env[69994]: DEBUG nova.policy [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13afe1f23e254be7b7ac8aa571efe407', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa488d1f7e20473da4fd92da7bffe764', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 669.821305] env[69994]: DEBUG nova.network.neutron [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Successfully updated port: 68ff6b46-787c-4682-8626-0a9c421f1cd7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.895409] env[69994]: DEBUG oslo_concurrency.lockutils [req-67dda444-4bb7-47f7-bc6f-2f974a4b6f1a req-823390d4-e0b4-4d14-be48-5450a6a7ab4f service nova] Releasing lock 
"refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.001412] env[69994]: DEBUG oslo_vmware.api [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Task: {'id': task-3241403, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347109} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.001684] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 670.001865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 670.002060] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.002300] env[69994]: INFO nova.compute.manager [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Took 1.08 seconds to destroy the instance on the hypervisor. [ 670.002588] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.002784] env[69994]: DEBUG nova.compute.manager [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 670.002951] env[69994]: DEBUG nova.network.neutron [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 670.030398] env[69994]: DEBUG nova.network.neutron [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.042604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.042604] env[69994]: DEBUG nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Instance network_info: |[{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 670.043730] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:62:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52e117d3-d120-42c6-8e72-70085845acbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '003af7d4-a8a5-43d4-b032-96df0b4ae173', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.051288] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating folder: Project (605d72502cc644bfa4d875bf348246de). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.051288] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ff914cd-e7b2-485d-9003-3c99231575cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.064239] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Created folder: Project (605d72502cc644bfa4d875bf348246de) in parent group-v647729. [ 670.064428] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating folder: Instances. Parent ref: group-v647793. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.064659] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6722251-e4ca-4b96-94b6-9c6789721e30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.074129] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Created folder: Instances in parent group-v647793. [ 670.074349] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.074531] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 670.074722] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab87210f-8d93-4e5c-995f-1763cb86c7c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.093929] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.093929] env[69994]: value = "task-3241406" [ 670.093929] env[69994]: _type = "Task" [ 670.093929] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.101386] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241406, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.106019] env[69994]: DEBUG nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 670.108599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.108855] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.108969] env[69994]: DEBUG nova.compute.manager [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Going to confirm migration 1 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 670.113182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-901704e1-e6ab-4260-bf23-9fbf73bb1121 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.114106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.077s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.115926] env[69994]: INFO nova.compute.claims [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.126604] env[69994]: DEBUG oslo_vmware.api [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241400, 'name': RemoveSnapshot_Task, 'duration_secs': 1.026872} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.126847] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 670.158647] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522227e7-f4a5-e6de-97c6-75c2c1f9adfb, 'name': SearchDatastore_Task, 'duration_secs': 0.030021} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.158938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.159244] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 1d548f54-4ffa-4299-9212-717350558ad4/1d548f54-4ffa-4299-9212-717350558ad4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 670.159765] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dc783c0-b2d3-439b-bb58-616884071a07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.166445] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 670.166445] env[69994]: value = "task-3241407" [ 670.166445] env[69994]: _type = "Task" [ 670.166445] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.178958] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241407, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.213665] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 670.214011] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c897517e-64f0-4f4e-9935-a5247520f3ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.222064] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 670.222064] env[69994]: value = "task-3241408" [ 670.222064] env[69994]: _type = "Task" [ 670.222064] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.233837] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241408, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.327032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "refresh_cache-ad957c30-c923-4bbf-8841-00e99de44781" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.327032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired lock "refresh_cache-ad957c30-c923-4bbf-8841-00e99de44781" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.327032] env[69994]: DEBUG nova.network.neutron [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.344203] env[69994]: DEBUG nova.network.neutron [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Successfully created port: 645be6ca-7ed3-4e18-affb-c05101e3a42f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 670.534585] env[69994]: DEBUG nova.network.neutron [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.604036] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241406, 'name': CreateVM_Task, 'duration_secs': 0.431346} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.604477] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 670.605438] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.605624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.605962] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 670.606535] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-156c57eb-5988-458a-b381-acd8f60c3f9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.623052] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 670.623052] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522c5014-1da7-070b-4916-9ded99f5367b" [ 670.623052] env[69994]: _type = "Task" [ 670.623052] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.633152] env[69994]: WARNING nova.compute.manager [None req-bda13e80-9f5e-4eb4-bc7a-9e1a6a238067 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Image not found during snapshot: nova.exception.ImageNotFound: Image 53c9deab-5f2f-487e-ae2e-21e934e22dd5 could not be found. [ 670.640463] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522c5014-1da7-070b-4916-9ded99f5367b, 'name': SearchDatastore_Task, 'duration_secs': 0.018947} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.640964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.641302] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 670.641531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.641618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.641749] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 670.642020] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb0e7219-8ef5-44df-9a27-ee2046154109 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.650258] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 670.650453] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 670.651211] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ce31afd-0077-4bb6-b864-c92a2e7404ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.656641] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 670.656641] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5298cc26-0e12-81b8-3da8-0e945f4d6838" [ 670.656641] env[69994]: _type = "Task" [ 670.656641] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.664744] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5298cc26-0e12-81b8-3da8-0e945f4d6838, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.676099] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476583} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.676349] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 1d548f54-4ffa-4299-9212-717350558ad4/1d548f54-4ffa-4299-9212-717350558ad4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 670.676548] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 670.676786] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fc7048d-5217-4f7c-9aae-f165ef6bcfad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.682577] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 670.682577] env[69994]: value = "task-3241409" [ 670.682577] env[69994]: _type = "Task" [ 670.682577] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.693227] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241409, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.732194] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241408, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.736165] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.736165] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.736165] env[69994]: DEBUG nova.network.neutron [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.736165] env[69994]: DEBUG nova.objects.instance [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lazy-loading 'info_cache' on Instance uuid dbad6bed-64ba-4dfd-abad-c0b2c775ba2c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 670.910703] env[69994]: DEBUG nova.network.neutron [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.955116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "558ee84a-731b-4cb1-967d-cf84c8d39718" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.955432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.976141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "5badecfd-5784-4968-8519-419a01c67465" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.976387] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.976588] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "5badecfd-5784-4968-8519-419a01c67465-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.976768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.976934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.981190] env[69994]: INFO nova.compute.manager [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 
5badecfd-5784-4968-8519-419a01c67465] Terminating instance [ 671.038286] env[69994]: INFO nova.compute.manager [-] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Took 1.04 seconds to deallocate network for instance. [ 671.125025] env[69994]: DEBUG nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 671.149626] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 671.149902] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.150069] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 671.150260] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.150475] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 671.150562] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 671.150742] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 671.150896] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 671.151160] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 671.151338] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 671.151525] env[69994]: DEBUG nova.virt.hardware [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 671.152858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e65903-768a-4c97-b417-ee832c352e32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.166421] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331a5cf1-1797-4b21-8881-ed9d0df8118f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.190918] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5298cc26-0e12-81b8-3da8-0e945f4d6838, 'name': SearchDatastore_Task, 'duration_secs': 0.012135} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.192048] env[69994]: DEBUG nova.network.neutron [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Updating instance_info_cache with network_info: [{"id": "68ff6b46-787c-4682-8626-0a9c421f1cd7", "address": "fa:16:3e:02:6b:db", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ff6b46-78", "ovs_interfaceid": "68ff6b46-787c-4682-8626-0a9c421f1cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.200508] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7100ebc4-8513-483b-a6cc-84557517bcdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.208620] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241409, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066938} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.210311] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 671.211033] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 671.211033] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5299a2be-4138-b26f-70ab-440199d48021" [ 671.211033] env[69994]: _type = "Task" [ 671.211033] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.211639] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671c52b8-368e-486f-881c-93cabd6c4dd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.241915] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 1d548f54-4ffa-4299-9212-717350558ad4/1d548f54-4ffa-4299-9212-717350558ad4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 671.253351] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6dd8982-9b54-471e-a853-4ec7e5748707 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.267738] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5299a2be-4138-b26f-70ab-440199d48021, 'name': SearchDatastore_Task, 'duration_secs': 0.016439} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.271178] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.271336] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 45a8dced-6c49-441c-92e2-ee323ed8753c/45a8dced-6c49-441c-92e2-ee323ed8753c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.275116] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24b2c4fe-0bb0-4437-a383-a21d755d3d4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.277479] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Received event network-vif-plugged-003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.277710] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Acquiring lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" 
{{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.277885] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.278786] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.278786] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] No waiting events found dispatching network-vif-plugged-003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 671.278786] env[69994]: WARNING nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Received unexpected event network-vif-plugged-003af7d4-a8a5-43d4-b032-96df0b4ae173 for instance with vm_state building and task_state spawning. [ 671.278786] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Received event network-changed-003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.278786] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Refreshing instance network info cache due to event network-changed-003af7d4-a8a5-43d4-b032-96df0b4ae173. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 671.279017] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Acquiring lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.279054] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Acquired lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.279203] env[69994]: DEBUG nova.network.neutron [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Refreshing network info cache for port 003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 671.286162] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 671.286162] env[69994]: value = "task-3241410" [ 671.286162] env[69994]: _type = "Task" [ 671.286162] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.286162] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241408, 'name': CreateSnapshot_Task, 'duration_secs': 1.021427} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.286384] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 671.293181] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569da423-8415-4051-9c0a-3a49091c64bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.300076] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 671.300076] env[69994]: value = "task-3241411" [ 671.300076] env[69994]: _type = "Task" [ 671.300076] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.316292] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241410, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.325893] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241411, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.485431] env[69994]: DEBUG nova.compute.manager [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 671.485727] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 671.486757] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff83d3b-fd78-4559-b802-c9ac09ab1ad2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.498168] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 671.498492] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5de70f39-9958-45b4-a923-258b603996bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.505383] env[69994]: DEBUG oslo_vmware.api [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 671.505383] env[69994]: value = "task-3241412" [ 671.505383] env[69994]: _type = "Task" [ 671.505383] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.522111] env[69994]: DEBUG oslo_vmware.api [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241412, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.546260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.704322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Releasing lock "refresh_cache-ad957c30-c923-4bbf-8841-00e99de44781" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.704322] env[69994]: DEBUG nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Instance network_info: |[{"id": "68ff6b46-787c-4682-8626-0a9c421f1cd7", "address": "fa:16:3e:02:6b:db", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ff6b46-78", "ovs_interfaceid": "68ff6b46-787c-4682-8626-0a9c421f1cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 671.707140] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:6b:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68ff6b46-787c-4682-8626-0a9c421f1cd7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 671.717277] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 671.721392] env[69994]: DEBUG nova.network.neutron [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance_info_cache with network_info: [{"id": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "address": "fa:16:3e:23:d3:c2", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.61", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c4ae184-b8", "ovs_interfaceid": "1c4ae184-b8b0-409f-aff4-5568af2af1b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.722389] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 671.724016] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01260f01-f10f-4ea4-be90-a92c59dcf112 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.750486] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 671.750486] env[69994]: value = "task-3241413" [ 671.750486] env[69994]: _type = "Task" [ 671.750486] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.762461] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241413, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.800049] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.809667] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241411, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.826611] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 671.831034] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-13b0a1a2-603f-4b28-ab4f-105a0e8d4e7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.843747] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 671.843747] env[69994]: value = "task-3241414" [ 671.843747] env[69994]: _type = "Task" [ 671.843747] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.859435] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241414, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.899454] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b622e8-1685-4096-8b9b-fcb7ae96a567 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.912421] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8532f26-0491-4b66-8e2d-01fc36e79c1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.960482] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66269961-ff11-4640-94bf-87fc32257e19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.970778] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240916d0-530b-4b27-9003-3fbc96934acd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.991328] env[69994]: DEBUG nova.compute.provider_tree [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.018530] env[69994]: DEBUG oslo_vmware.api [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241412, 'name': PowerOffVM_Task, 'duration_secs': 0.217092} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.018868] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 672.019138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 672.019339] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c89bb5a-c2fc-455e-ac95-80569387dbc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.023547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "91bb882c-7b84-450f-bd03-91ea1ce739ce" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.023976] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.024236] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "91bb882c-7b84-450f-bd03-91ea1ce739ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.024447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.024715] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.027783] env[69994]: INFO nova.compute.manager [None 
req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Terminating instance [ 672.102324] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 672.102584] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 672.103128] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Deleting the datastore file [datastore2] 5badecfd-5784-4968-8519-419a01c67465 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 672.103128] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7c492cd-4dc2-4a01-ac60-38d2e6fddbce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.113090] env[69994]: DEBUG oslo_vmware.api [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for the task: (returnval){ [ 672.113090] env[69994]: value = "task-3241416" [ 672.113090] env[69994]: _type = "Task" [ 672.113090] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.129901] env[69994]: DEBUG oslo_vmware.api [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241416, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.222847] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.223397] env[69994]: DEBUG nova.objects.instance [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lazy-loading 'migration_context' on Instance uuid dbad6bed-64ba-4dfd-abad-c0b2c775ba2c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 672.262236] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241413, 'name': CreateVM_Task, 'duration_secs': 0.384735} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.262449] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.263185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.265026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.265026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 672.265026] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ed471da-049d-4669-8d48-637ca2b1a41e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.270549] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 672.270549] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5254a06a-b6ba-13f3-2c56-f30dbab52b7e" [ 672.270549] env[69994]: _type = "Task" [ 672.270549] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.279947] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5254a06a-b6ba-13f3-2c56-f30dbab52b7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.299880] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241410, 'name': ReconfigVM_Task, 'duration_secs': 0.924477} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.299880] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 1d548f54-4ffa-4299-9212-717350558ad4/1d548f54-4ffa-4299-9212-717350558ad4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.299880] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f14dad41-5b5c-46b3-a721-0a1a2733521f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.307314] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 672.307314] env[69994]: value = "task-3241417" [ 672.307314] env[69994]: _type = "Task" [ 672.307314] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.308063] env[69994]: DEBUG nova.network.neutron [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updated VIF entry in instance network info cache for port 003af7d4-a8a5-43d4-b032-96df0b4ae173. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.308428] env[69994]: DEBUG nova.network.neutron [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.317547] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241411, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.919741} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.318273] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 45a8dced-6c49-441c-92e2-ee323ed8753c/45a8dced-6c49-441c-92e2-ee323ed8753c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.318642] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.319520] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53fb7604-0c22-4619-87d9-bce8f5ea601b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.326798] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241417, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.332715] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 672.332715] env[69994]: value = "task-3241418" [ 672.332715] env[69994]: _type = "Task" [ 672.332715] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.343343] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241418, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.358263] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241414, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.496210] env[69994]: DEBUG nova.scheduler.client.report [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.533168] env[69994]: DEBUG nova.compute.manager [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 672.533412] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 672.534407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c5f23f-00d2-481c-929b-b92bcb48585b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.543590] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 672.546019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a207c20f-a2bc-45e0-9938-bce9f0dee3f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.554425] env[69994]: DEBUG oslo_vmware.api [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 672.554425] env[69994]: value = "task-3241419" [ 672.554425] env[69994]: _type = "Task" [ 672.554425] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.563478] env[69994]: DEBUG oslo_vmware.api [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241419, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.623803] env[69994]: DEBUG oslo_vmware.api [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Task: {'id': task-3241416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230478} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.624069] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 672.624255] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 672.624466] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 672.624646] env[69994]: INFO nova.compute.manager [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] [instance: 5badecfd-5784-4968-8519-419a01c67465] Took 1.14 seconds to destroy the instance on the hypervisor. [ 672.625383] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.625383] env[69994]: DEBUG nova.compute.manager [-] [instance: 5badecfd-5784-4968-8519-419a01c67465] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 672.625383] env[69994]: DEBUG nova.network.neutron [-] [instance: 5badecfd-5784-4968-8519-419a01c67465] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.648361] env[69994]: DEBUG nova.network.neutron [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Successfully updated port: 645be6ca-7ed3-4e18-affb-c05101e3a42f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 672.731836] env[69994]: DEBUG nova.objects.base [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 672.731836] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f46084-1a39-4b73-a062-9894c405272b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.759534] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1fcfc25-0a00-43ae-8c16-978a657f499a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.766712] env[69994]: DEBUG oslo_vmware.api [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 672.766712] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c940a5-f41a-48bd-be9d-8303099f27c5" [ 672.766712] env[69994]: _type = "Task" [ 672.766712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.780916] env[69994]: DEBUG oslo_vmware.api [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c940a5-f41a-48bd-be9d-8303099f27c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009924} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.788187] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.788428] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5254a06a-b6ba-13f3-2c56-f30dbab52b7e, 'name': SearchDatastore_Task, 'duration_secs': 0.010925} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.788914] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.789156] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 672.789393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.789549] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.789798] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 672.790088] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b51ab52-5069-43ad-9e08-493788d53876 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.800060] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 
tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 672.800260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 672.801885] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4febe233-6f5b-41ee-9a6c-4e479e21e8fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.806328] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 672.806328] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52217129-0bc7-e954-3e38-6add4a4f58ac" [ 672.806328] env[69994]: _type = "Task" [ 672.806328] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.811169] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Releasing lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.811771] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Received event network-vif-plugged-68ff6b46-787c-4682-8626-0a9c421f1cd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 672.811929] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Acquiring lock "ad957c30-c923-4bbf-8841-00e99de44781-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.812148] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Lock "ad957c30-c923-4bbf-8841-00e99de44781-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.812308] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Lock "ad957c30-c923-4bbf-8841-00e99de44781-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.812528] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] No waiting events found dispatching 
network-vif-plugged-68ff6b46-787c-4682-8626-0a9c421f1cd7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 672.812703] env[69994]: WARNING nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Received unexpected event network-vif-plugged-68ff6b46-787c-4682-8626-0a9c421f1cd7 for instance with vm_state building and task_state spawning. [ 672.812796] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Received event network-changed-68ff6b46-787c-4682-8626-0a9c421f1cd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 672.812943] env[69994]: DEBUG nova.compute.manager [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Refreshing instance network info cache due to event network-changed-68ff6b46-787c-4682-8626-0a9c421f1cd7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 672.813135] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Acquiring lock "refresh_cache-ad957c30-c923-4bbf-8841-00e99de44781" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.813269] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Acquired lock "refresh_cache-ad957c30-c923-4bbf-8841-00e99de44781" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.813419] env[69994]: DEBUG nova.network.neutron [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Refreshing network info cache for port 68ff6b46-787c-4682-8626-0a9c421f1cd7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 672.822906] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52217129-0bc7-e954-3e38-6add4a4f58ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.826086] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241417, 'name': Rename_Task, 'duration_secs': 0.222041} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.826919] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 672.827170] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3914e7a9-e65e-4611-b102-866bc9997d3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.834691] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 672.834691] env[69994]: value = "task-3241420" [ 672.834691] env[69994]: _type = "Task" [ 672.834691] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.846333] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241418, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069729} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.849887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.850272] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241420, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.854021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ca5204-062a-4391-8abc-68a49c877aa3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.864162] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241414, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.883189] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 45a8dced-6c49-441c-92e2-ee323ed8753c/45a8dced-6c49-441c-92e2-ee323ed8753c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.883662] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-977b6036-66f2-433a-936f-ccc164dc94b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.904381] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 672.904381] env[69994]: value = "task-3241421" [ 672.904381] env[69994]: _type = "Task" [ 672.904381] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.914318] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241421, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.000478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.886s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.001100] env[69994]: DEBUG nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 673.005025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.255s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.006497] env[69994]: INFO nova.compute.claims [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.064981] env[69994]: DEBUG oslo_vmware.api [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241419, 'name': PowerOffVM_Task, 'duration_secs': 0.220557} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.065909] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 673.065909] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 673.066073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee8b6a39-d915-41c0-a019-9ab54e1e8deb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.126497] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 673.126718] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 673.126914] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleting the datastore file [datastore1] 91bb882c-7b84-450f-bd03-91ea1ce739ce {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.127191] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-723ddb46-3af2-4c6a-9420-6297ced4835a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.134541] env[69994]: DEBUG oslo_vmware.api [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 673.134541] env[69994]: value = "task-3241423" [ 673.134541] env[69994]: _type = "Task" [ 673.134541] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.143470] env[69994]: DEBUG oslo_vmware.api [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.151461] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.151612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquired lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.151759] env[69994]: DEBUG nova.network.neutron [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.323790] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52217129-0bc7-e954-3e38-6add4a4f58ac, 'name': SearchDatastore_Task, 'duration_secs': 0.015133} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.324955] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-931f1b78-2e04-4de7-aa8a-368e344155b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.332829] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 673.332829] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5293896b-f4bb-a4ef-6c4e-bbe77f8e5a86" [ 673.332829] env[69994]: _type = "Task" [ 673.332829] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.344316] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5293896b-f4bb-a4ef-6c4e-bbe77f8e5a86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.349734] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241420, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.358814] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241414, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.421771] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241421, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.512232] env[69994]: DEBUG nova.compute.utils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 673.513605] env[69994]: DEBUG nova.network.neutron [-] [instance: 5badecfd-5784-4968-8519-419a01c67465] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.516732] env[69994]: DEBUG nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 673.516902] env[69994]: DEBUG nova.network.neutron [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.646014] env[69994]: DEBUG oslo_vmware.api [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.365898} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.646391] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.646613] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 673.646794] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 673.647013] env[69994]: INFO nova.compute.manager [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Took 1.11 seconds to destroy the instance on the hypervisor. [ 673.647333] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.647620] env[69994]: DEBUG nova.compute.manager [-] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 673.647725] env[69994]: DEBUG nova.network.neutron [-] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.687603] env[69994]: DEBUG nova.network.neutron [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.770963] env[69994]: DEBUG nova.compute.manager [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Received event network-vif-plugged-645be6ca-7ed3-4e18-affb-c05101e3a42f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 673.774165] env[69994]: DEBUG oslo_concurrency.lockutils [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] Acquiring lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.774453] env[69994]: DEBUG oslo_concurrency.lockutils [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.774576] env[69994]: DEBUG oslo_concurrency.lockutils [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.774781] env[69994]: DEBUG nova.compute.manager [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] No waiting events found dispatching network-vif-plugged-645be6ca-7ed3-4e18-affb-c05101e3a42f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 673.775056] env[69994]: WARNING nova.compute.manager [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Received unexpected event network-vif-plugged-645be6ca-7ed3-4e18-affb-c05101e3a42f for instance with vm_state building and task_state spawning. [ 673.775328] env[69994]: DEBUG nova.compute.manager [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Received event network-changed-645be6ca-7ed3-4e18-affb-c05101e3a42f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 673.775582] env[69994]: DEBUG nova.compute.manager [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Refreshing instance network info cache due to event network-changed-645be6ca-7ed3-4e18-affb-c05101e3a42f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 673.775875] env[69994]: DEBUG oslo_concurrency.lockutils [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] Acquiring lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.802110] env[69994]: DEBUG nova.policy [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '618f7d6ab15f478c87957934738484d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b29d9865d614bbcba48367777782afd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 673.846633] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5293896b-f4bb-a4ef-6c4e-bbe77f8e5a86, 'name': SearchDatastore_Task, 'duration_secs': 0.0176} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.847390] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.847695] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ad957c30-c923-4bbf-8841-00e99de44781/ad957c30-c923-4bbf-8841-00e99de44781.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 673.848320] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b178e7b9-f1a2-4fb3-b118-550ac4f0a69a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.853451] env[69994]: DEBUG oslo_vmware.api [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241420, 'name': PowerOnVM_Task, 'duration_secs': 0.859009} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.857456] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 673.857692] env[69994]: INFO nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Took 11.02 seconds to spawn the instance on the hypervisor. [ 673.857876] env[69994]: DEBUG nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 673.860391] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97435315-cf81-4e84-95a5-830dbfd2c8d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.864279] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 673.864279] env[69994]: value = "task-3241424" [ 673.864279] env[69994]: _type = "Task" [ 673.864279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.875307] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241414, 'name': CloneVM_Task, 'duration_secs': 1.940471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.879113] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Created linked-clone VM from snapshot [ 673.881461] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c4fc0c-f381-4b8e-833a-57eccb50af3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.888533] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241424, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.895503] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Uploading image b0eeb296-ea9a-4b6e-b148-a29401cca7af {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 673.917383] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241421, 'name': ReconfigVM_Task, 'duration_secs': 0.670418} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.917786] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 45a8dced-6c49-441c-92e2-ee323ed8753c/45a8dced-6c49-441c-92e2-ee323ed8753c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.918698] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3843b57-e5bc-43be-bea9-5c44f4a2a067 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.923114] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 673.923114] env[69994]: value = "vm-647798" [ 673.923114] env[69994]: _type = "VirtualMachine" [ 673.923114] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 673.923370] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-393ea83b-7a9c-4305-9ab0-c3aafb840d91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.926452] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 673.926452] env[69994]: value = "task-3241425" [ 673.926452] env[69994]: _type = "Task" [ 673.926452] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.933325] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lease: (returnval){ [ 673.933325] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c7e7ec-08df-78b2-06e8-67c6dcb7574f" [ 673.933325] env[69994]: _type = "HttpNfcLease" [ 673.933325] env[69994]: } obtained for exporting VM: (result){ [ 673.933325] env[69994]: value = "vm-647798" [ 673.933325] env[69994]: _type = "VirtualMachine" [ 673.933325] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 673.933604] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the lease: (returnval){ [ 673.933604] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c7e7ec-08df-78b2-06e8-67c6dcb7574f" [ 673.933604] env[69994]: _type = "HttpNfcLease" [ 673.933604] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 673.940328] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241425, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.943761] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 673.943761] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c7e7ec-08df-78b2-06e8-67c6dcb7574f" [ 673.943761] env[69994]: _type = "HttpNfcLease" [ 673.943761] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 673.956417] env[69994]: DEBUG nova.network.neutron [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Updating instance_info_cache with network_info: [{"id": "645be6ca-7ed3-4e18-affb-c05101e3a42f", "address": "fa:16:3e:4d:e5:07", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap645be6ca-7e", "ovs_interfaceid": "645be6ca-7ed3-4e18-affb-c05101e3a42f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.019045] env[69994]: DEBUG nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 674.023286] env[69994]: INFO nova.compute.manager [-] [instance: 5badecfd-5784-4968-8519-419a01c67465] Took 1.40 seconds to deallocate network for instance. 
[ 674.360281] env[69994]: DEBUG nova.network.neutron [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Updated VIF entry in instance network info cache for port 68ff6b46-787c-4682-8626-0a9c421f1cd7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 674.360281] env[69994]: DEBUG nova.network.neutron [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Updating instance_info_cache with network_info: [{"id": "68ff6b46-787c-4682-8626-0a9c421f1cd7", "address": "fa:16:3e:02:6b:db", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.123", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ff6b46-78", "ovs_interfaceid": "68ff6b46-787c-4682-8626-0a9c421f1cd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.375538] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241424, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.391575] env[69994]: INFO nova.compute.manager [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Took 36.17 seconds to build instance. [ 674.449648] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241425, 'name': Rename_Task, 'duration_secs': 0.212802} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.450722] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.450920] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0d8566f-d15d-4058-81ae-6e4154b23989 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.455864] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 674.455864] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c7e7ec-08df-78b2-06e8-67c6dcb7574f" [ 674.455864] env[69994]: _type = "HttpNfcLease" [ 674.455864] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 674.456795] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 674.456795] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c7e7ec-08df-78b2-06e8-67c6dcb7574f" [ 674.456795] env[69994]: _type = "HttpNfcLease" [ 674.456795] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 674.458564] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee736121-1c96-4c02-b4dc-e7d49708a3d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.467214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Releasing lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.469492] env[69994]: DEBUG nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Instance network_info: |[{"id": "645be6ca-7ed3-4e18-affb-c05101e3a42f", "address": "fa:16:3e:4d:e5:07", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap645be6ca-7e", "ovs_interfaceid": 
"645be6ca-7ed3-4e18-affb-c05101e3a42f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 674.469492] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 674.469492] env[69994]: value = "task-3241427" [ 674.469492] env[69994]: _type = "Task" [ 674.469492] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.469903] env[69994]: DEBUG oslo_concurrency.lockutils [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] Acquired lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.469903] env[69994]: DEBUG nova.network.neutron [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Refreshing network info cache for port 645be6ca-7ed3-4e18-affb-c05101e3a42f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.470750] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:e5:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '645be6ca-7ed3-4e18-affb-c05101e3a42f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.484155] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Creating folder: Project (fa488d1f7e20473da4fd92da7bffe764). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.492360] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbb1e23a-95c4-415b-aee2-6c8a0bca0ecb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.505287] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1f0a-d2f6-eecc-661b-06c45af60b4d/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 674.509026] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1f0a-d2f6-eecc-661b-06c45af60b4d/disk-0.vmdk for reading. 
{{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 674.567322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.575874] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241427, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.576138] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Created folder: Project (fa488d1f7e20473da4fd92da7bffe764) in parent group-v647729. [ 674.576312] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Creating folder: Instances. Parent ref: group-v647799. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 674.577359] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad4bb53c-a841-4832-bc57-0e0b0ddd8005 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.586041] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Created folder: Instances in parent group-v647799. [ 674.586289] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 674.586480] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.586737] env[69994]: DEBUG nova.network.neutron [-] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.587800] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea50cb21-04c5-4c21-bf20-22177373ed1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.613227] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.613227] env[69994]: value = "task-3241430" [ 674.613227] env[69994]: _type = "Task" [ 674.613227] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.621420] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241430, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.684478] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6aa10f95-8790-4cc6-b0cc-b005586c17e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.782140] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce910e4-7f52-4f82-8ead-59543c463e79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.789963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d374bd7-f61f-4dd8-b769-bc250cdf56b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.827826] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01cbed0-3591-4345-841c-9c38a9194332 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.832443] env[69994]: DEBUG nova.compute.manager [req-e3565eb0-e091-4066-8773-96a219b544e2 req-d3d1c81d-8cee-4a1c-aba8-7074f293227d service nova] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Received event network-vif-deleted-73644aa1-0c58-40cd-8d52-00e4b388d8bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.837879] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca30ba1e-523d-4628-a26c-88a3ebc2eb90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.854866] env[69994]: DEBUG nova.compute.provider_tree [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.866377] env[69994]: DEBUG oslo_concurrency.lockutils [req-2e611eee-ab12-45b4-b153-c53cecbd2403 req-3d4874ee-bcdb-4113-8cf1-7fa898714a88 service nova] Releasing lock "refresh_cache-ad957c30-c923-4bbf-8841-00e99de44781" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.876722] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241424, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.66357} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.876981] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ad957c30-c923-4bbf-8841-00e99de44781/ad957c30-c923-4bbf-8841-00e99de44781.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 674.877207] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 674.877491] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6891b2c0-4a0c-431a-bd54-6d19bcb5125a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.884396] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 674.884396] env[69994]: value = "task-3241431" [ 674.884396] env[69994]: _type = "Task" [ 674.884396] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.893692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-365104fc-2349-4a5e-b173-8c0ad144098e tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "1d548f54-4ffa-4299-9212-717350558ad4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.407s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.894009] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241431, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.978242] env[69994]: DEBUG nova.network.neutron [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Successfully created port: 7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.994192] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241427, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.077603] env[69994]: DEBUG nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 675.111022] env[69994]: INFO nova.compute.manager [-] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Took 1.46 seconds to deallocate network for instance. [ 675.114695] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 675.115496] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.115879] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 675.118432] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.118667] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 675.118855] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 675.119151] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 
tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 675.119538] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 675.119538] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 675.119638] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 675.119807] env[69994]: DEBUG nova.virt.hardware [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 675.124720] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf69d820-dee1-4223-aba8-8ba869d14aaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.149263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd209aa-3c08-4ec6-9242-05e54dc236c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.156709] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241430, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.282901] env[69994]: DEBUG nova.network.neutron [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Updated VIF entry in instance network info cache for port 645be6ca-7ed3-4e18-affb-c05101e3a42f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 675.283347] env[69994]: DEBUG nova.network.neutron [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Updating instance_info_cache with network_info: [{"id": "645be6ca-7ed3-4e18-affb-c05101e3a42f", "address": "fa:16:3e:4d:e5:07", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap645be6ca-7e", "ovs_interfaceid": "645be6ca-7ed3-4e18-affb-c05101e3a42f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.358661] env[69994]: DEBUG nova.scheduler.client.report [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 675.398058] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 675.404160] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241431, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069179} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.404710] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.405713] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328f354b-1779-4f99-b131-1021afb11b28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.431909] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] ad957c30-c923-4bbf-8841-00e99de44781/ad957c30-c923-4bbf-8841-00e99de44781.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.432334] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fcfde9e3-6c13-4206-8b28-ebb90b1804dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.458576] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 675.458576] env[69994]: value = "task-3241432" [ 675.458576] env[69994]: _type = "Task" [ 675.458576] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.466540] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241432, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.498413] env[69994]: DEBUG oslo_vmware.api [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241427, 'name': PowerOnVM_Task, 'duration_secs': 0.864453} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.499174] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.499500] env[69994]: INFO nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Took 9.87 seconds to spawn the instance on the hypervisor. 
[ 675.499742] env[69994]: DEBUG nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.500592] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ef574b-508b-42d3-a17e-19388a3f4065 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.636305] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241430, 'name': CreateVM_Task, 'duration_secs': 0.606268} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.637640] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.639197] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.639197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.639197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.639479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 675.640082] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df089e14-0ad4-4d0b-baa4-2490894ec9f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.647235] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 675.647235] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c463b8-dfc9-26c7-0a83-8bdef6ac5577" [ 675.647235] env[69994]: _type = "Task" [ 675.647235] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.656929] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c463b8-dfc9-26c7-0a83-8bdef6ac5577, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.790590] env[69994]: DEBUG oslo_concurrency.lockutils [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] Releasing lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.790900] env[69994]: DEBUG nova.compute.manager [req-c639ed84-260a-4bf1-8b37-4a0adba12cb7 req-af2833b6-5889-4e00-95db-7f04512f375d service nova] [instance: 5badecfd-5784-4968-8519-419a01c67465] Received event network-vif-deleted-097f8c85-cd23-443b-8f4a-aae58ce5d392 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 675.876261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.870s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.877360] env[69994]: DEBUG nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 675.880936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.691s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.881108] env[69994]: DEBUG nova.objects.instance [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lazy-loading 'resources' on Instance uuid 84efe900-1d79-42f9-b3c6-54299757cdbc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 675.942883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.971164] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241432, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.027385] env[69994]: INFO nova.compute.manager [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Took 36.32 seconds to build instance. [ 676.160099] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c463b8-dfc9-26c7-0a83-8bdef6ac5577, 'name': SearchDatastore_Task, 'duration_secs': 0.012996} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.160416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.160676] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.160956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.161138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.165019] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.165019] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f87ece78-963d-4b7b-a1fc-496416f08abc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.173206] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.173433] 
env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.178735] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cef815c7-5324-4770-8d84-bf9a2f10f75a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.191315] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 676.191315] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dc1335-b4ba-bbaf-67ce-41e67459a94e" [ 676.191315] env[69994]: _type = "Task" [ 676.191315] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.199831] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dc1335-b4ba-bbaf-67ce-41e67459a94e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.387541] env[69994]: DEBUG nova.compute.utils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 676.393762] env[69994]: DEBUG nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 676.393762] env[69994]: DEBUG nova.network.neutron [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.468215] env[69994]: DEBUG nova.policy [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd74f1f7950a94d799185d2322a6c4a38', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7799f51750bb4c2589042a3b7bc8af01', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 676.473749] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241432, 'name': ReconfigVM_Task, 'duration_secs': 0.70945} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.476566] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Reconfigured VM instance instance-00000016 to attach disk [datastore1] ad957c30-c923-4bbf-8841-00e99de44781/ad957c30-c923-4bbf-8841-00e99de44781.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 676.477648] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-068d0c4a-d19d-43dc-b191-ec6cfafe6d78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.484787] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 676.484787] env[69994]: value = "task-3241433" [ 676.484787] env[69994]: _type = "Task" [ 676.484787] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.494843] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241433, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.531642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9ac781b5-c504-4b01-9851-70dba101455c tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.560s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.709227] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dc1335-b4ba-bbaf-67ce-41e67459a94e, 'name': SearchDatastore_Task, 'duration_secs': 0.013598} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.718917] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d04532c-3af2-49f8-a67b-ac94d02865b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.723152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.723524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.727848] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 676.727848] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d0439a-d047-1070-3e68-4d55b614e3d1" [ 676.727848] env[69994]: _type = "Task" [ 676.727848] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.738438] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d0439a-d047-1070-3e68-4d55b614e3d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.893354] env[69994]: DEBUG nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 677.004118] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241433, 'name': Rename_Task, 'duration_secs': 0.193377} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.004502] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.004900] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a237e427-1152-4a2d-adfd-eb004a5494d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.018855] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 677.018855] env[69994]: value = "task-3241434" [ 677.018855] env[69994]: _type = "Task" [ 677.018855] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.031570] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241434, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.042382] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 677.063962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7bf97c-1c0b-4497-8ea5-831abfbe4810 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.071079] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41893196-b580-4c34-86ed-cfbdd1ba9267 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.109012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe1100b-3bd5-4365-ad30-679aed4cf157 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.116560] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5580faba-a6f2-44d4-9ea3-303d4a40c8b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.131020] env[69994]: DEBUG nova.compute.provider_tree [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.244082] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d0439a-d047-1070-3e68-4d55b614e3d1, 'name': SearchDatastore_Task, 'duration_secs': 0.012044} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.244082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.244403] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 84bff4c0-9e2e-47f2-a378-70d3c992b58b/84bff4c0-9e2e-47f2-a378-70d3c992b58b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 677.244607] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83d97b21-9475-4804-a35c-f9f49a39d414 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.252670] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 677.252670] env[69994]: value = "task-3241435" [ 677.252670] env[69994]: _type = "Task" [ 677.252670] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.279236] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241435, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.538182] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241434, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.545444] env[69994]: DEBUG nova.network.neutron [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Successfully created port: f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.581403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.638184] env[69994]: DEBUG nova.scheduler.client.report [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.770023] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241435, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.909996] env[69994]: DEBUG nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 677.951134] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 677.951134] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.951134] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 677.951858] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.952670] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 677.953085] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 677.953660] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 677.954026] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 677.954398] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 
tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 677.954771] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 677.955141] env[69994]: DEBUG nova.virt.hardware [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 677.956378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f24e19-33d0-4e6d-a540-1f0528aeed4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.966826] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94855d0-5885-4a51-a09a-6e95b5de1df0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.031842] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241434, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.148061] env[69994]: DEBUG nova.network.neutron [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Successfully updated port: 7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 678.150876] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.269s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.154425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.757s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.155833] env[69994]: DEBUG nova.objects.instance [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 678.180088] env[69994]: INFO nova.scheduler.client.report [None 
req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleted allocations for instance 84efe900-1d79-42f9-b3c6-54299757cdbc [ 678.270416] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241435, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696814} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.270416] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 84bff4c0-9e2e-47f2-a378-70d3c992b58b/84bff4c0-9e2e-47f2-a378-70d3c992b58b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 678.270416] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 678.271012] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-994133a5-b4bd-4e5e-96dc-b3f1050b3c3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.279210] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 678.279210] env[69994]: value = "task-3241436" [ 678.279210] env[69994]: _type = "Task" [ 678.279210] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.289942] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241436, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.536756] env[69994]: DEBUG oslo_vmware.api [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241434, 'name': PowerOnVM_Task, 'duration_secs': 1.498218} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.537167] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.537513] env[69994]: INFO nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Took 10.23 seconds to spawn the instance on the hypervisor. [ 678.537809] env[69994]: DEBUG nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.538777] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc5c887-7870-44ac-8f95-09b5bf9094fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.654991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.654991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquired lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.656578] env[69994]: DEBUG nova.network.neutron [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.689685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7fd06ea0-ee52-424f-97a9-fc86e82a3d85 tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "84efe900-1d79-42f9-b3c6-54299757cdbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.927s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.744804] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 678.745161] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 678.797048] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161937} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.799503] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.799503] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b8558a-0b61-460c-8d31-f442f7a9c7f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.829128] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 84bff4c0-9e2e-47f2-a378-70d3c992b58b/84bff4c0-9e2e-47f2-a378-70d3c992b58b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 678.829857] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69b1b299-b3ab-4625-bace-6a8b02708dac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.852900] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 678.852900] env[69994]: value = "task-3241437" [ 678.852900] env[69994]: _type = "Task" [ 678.852900] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.862438] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241437, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.979619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.979906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.007016] env[69994]: DEBUG nova.compute.manager [req-d9750539-fa95-44bb-be1c-86035d212a4b req-b2977c24-7595-4453-907e-521a97a4c7cc service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Received event network-vif-plugged-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 679.007541] env[69994]: DEBUG oslo_concurrency.lockutils [req-d9750539-fa95-44bb-be1c-86035d212a4b req-b2977c24-7595-4453-907e-521a97a4c7cc service nova] Acquiring lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.007580] env[69994]: DEBUG oslo_concurrency.lockutils [req-d9750539-fa95-44bb-be1c-86035d212a4b req-b2977c24-7595-4453-907e-521a97a4c7cc service nova] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.007753] env[69994]: DEBUG oslo_concurrency.lockutils [req-d9750539-fa95-44bb-be1c-86035d212a4b req-b2977c24-7595-4453-907e-521a97a4c7cc service nova] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.007970] env[69994]: DEBUG nova.compute.manager [req-d9750539-fa95-44bb-be1c-86035d212a4b req-b2977c24-7595-4453-907e-521a97a4c7cc service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] No waiting events found dispatching network-vif-plugged-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 679.008596] env[69994]: WARNING nova.compute.manager [req-d9750539-fa95-44bb-be1c-86035d212a4b req-b2977c24-7595-4453-907e-521a97a4c7cc service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Received unexpected event network-vif-plugged-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 for instance with vm_state building and task_state spawning. 
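The records just above show the per-instance event lock pattern: a "<uuid>-events" lock is acquired by InstanceEvents.pop_instance_event, the pending event is looked up (here none is found, so the network-vif-plugged event is logged as unexpected while the instance is still building), and the lock is released. The following is a minimal, illustrative sketch of that lock-guarded pop using oslo_concurrency.lockutils; the class and method names (SimpleInstanceEvents, pop_event) are hypothetical and are not Nova's actual InstanceEvents implementation.

    # Illustrative sketch only: mirrors the "<uuid>-events" lock pattern visible in
    # the log records above (acquire, pop a pending event, release). Names are
    # hypothetical, not Nova's real code.
    from oslo_concurrency import lockutils


    class SimpleInstanceEvents:
        def __init__(self):
            # Pending events keyed by instance UUID, e.g.
            # {"cef66a67-...": {"network-vif-plugged-7766d3a6-...": payload}}
            self._events = {}

        def pop_event(self, instance_uuid, event_name):
            # Per-instance lock name follows the "<uuid>-events" convention
            # seen in the lockutils records above.
            with lockutils.lock(f"{instance_uuid}-events"):
                pending = self._events.get(instance_uuid, {})
                # Returns None when no waiter registered the event, which is the
                # case logged above as "No waiting events found dispatching ...".
                return pending.pop(event_name, None)


    # Example: an external network-vif-plugged event arrives before any waiter exists.
    events = SimpleInstanceEvents()
    if events.pop_event("cef66a67-e3ac-40dc-a8a4-0375bd64c484",
                        "network-vif-plugged-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0") is None:
        print("Received unexpected event; no waiter registered yet")
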
[ 679.061899] env[69994]: INFO nova.compute.manager [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Took 38.31 seconds to build instance. [ 679.168686] env[69994]: DEBUG oslo_concurrency.lockutils [None req-35e950a0-9e03-4522-8e68-aac38b6189ee tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.169848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.444s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.171589] env[69994]: INFO nova.compute.claims [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.217182] env[69994]: DEBUG nova.network.neutron [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.257671] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.257903] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.258161] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.259551] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.259551] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.259551] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.259551] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 679.259551] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.372100] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241437, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.459872] env[69994]: DEBUG nova.network.neutron [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Updating instance_info_cache with network_info: [{"id": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "address": "fa:16:3e:68:a6:86", "network": {"id": "134b397d-887f-40ba-941f-c04da9756c8f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-692510832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b29d9865d614bbcba48367777782afd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7766d3a6-c9", "ovs_interfaceid": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.565911] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5aa3259c-4ecb-404d-b7a8-ff77e49fd614 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "ad957c30-c923-4bbf-8841-00e99de44781" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.158s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.763670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.869395] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241437, 'name': ReconfigVM_Task, 'duration_secs': 0.701235} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.871066] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 84bff4c0-9e2e-47f2-a378-70d3c992b58b/84bff4c0-9e2e-47f2-a378-70d3c992b58b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 679.871206] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b056eef8-7d98-4ebe-ac1b-2392adf4d478 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.880187] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "0b284e71-7af2-4782-b950-4f7eac5221a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.880737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.881072] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 679.881072] env[69994]: value = "task-3241438" [ 679.881072] env[69994]: _type = "Task" [ 679.881072] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.893571] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241438, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.966150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Releasing lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.966150] env[69994]: DEBUG nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Instance network_info: |[{"id": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "address": "fa:16:3e:68:a6:86", "network": {"id": "134b397d-887f-40ba-941f-c04da9756c8f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-692510832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b29d9865d614bbcba48367777782afd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7766d3a6-c9", "ovs_interfaceid": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 679.966320] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:a6:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98d96b75-ac36-499a-adc2-130c8c1d55ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.973469] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Creating folder: Project (2b29d9865d614bbcba48367777782afd). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.974704] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-304c7a67-1463-470f-92c7-0bbfac912add {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.992950] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Created folder: Project (2b29d9865d614bbcba48367777782afd) in parent group-v647729. [ 679.992950] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Creating folder: Instances. Parent ref: group-v647802. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.992950] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc1c2849-0848-4c03-9ef7-c9ca496ba6bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.000591] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Created folder: Instances in parent group-v647802. [ 680.000591] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 680.000591] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 680.000591] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be00d0ca-1613-4c9b-82ef-94ed81c60de4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.747586] env[69994]: DEBUG nova.network.neutron [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Successfully updated port: f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.749615] env[69994]: DEBUG nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.757109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.757109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.757109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.757109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.759108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.761534] env[69994]: INFO nova.compute.manager [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Terminating instance [ 680.766311] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.766564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.767975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c541371a-f37d-4bd5-bf66-659bd05fe1b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.773818] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 680.773818] env[69994]: value = "task-3241441" [ 680.773818] env[69994]: _type = "Task" [ 680.773818] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.783888] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-24e14080-4140-4546-afef-f5654edbdf86 tempest-ServersAdminNegativeTestJSON-171297159 tempest-ServersAdminNegativeTestJSON-171297159-project-admin] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 680.784202] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241438, 'name': Rename_Task, 'duration_secs': 0.250647} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.785032] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d66d6cd6-4f67-4d74-9e37-34b0c589ade7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.786777] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 680.790215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-830d8250-11c0-4b89-9004-dc63edc202d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.792667] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241441, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.801198] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 680.801198] env[69994]: value = "task-3241443" [ 680.801198] env[69994]: _type = "Task" [ 680.801198] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.805671] env[69994]: DEBUG oslo_vmware.api [None req-24e14080-4140-4546-afef-f5654edbdf86 tempest-ServersAdminNegativeTestJSON-171297159 tempest-ServersAdminNegativeTestJSON-171297159-project-admin] Waiting for the task: (returnval){ [ 680.805671] env[69994]: value = "task-3241442" [ 680.805671] env[69994]: _type = "Task" [ 680.805671] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.817859] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241443, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.822650] env[69994]: DEBUG oslo_vmware.api [None req-24e14080-4140-4546-afef-f5654edbdf86 tempest-ServersAdminNegativeTestJSON-171297159 tempest-ServersAdminNegativeTestJSON-171297159-project-admin] Task: {'id': task-3241442, 'name': SuspendVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.040733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "86e514bb-8b47-4605-bd85-55c6c9874320" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.041121] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "86e514bb-8b47-4605-bd85-55c6c9874320" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.262404] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.262585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.262762] env[69994]: DEBUG nova.network.neutron [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.287685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "refresh_cache-e9bc15f9-e957-487f-b8d5-d1332b185dcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.287864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquired lock 
"refresh_cache-e9bc15f9-e957-487f-b8d5-d1332b185dcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.288060] env[69994]: DEBUG nova.network.neutron [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.289208] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241441, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.290229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.321339] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241443, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.327197] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Received event network-changed-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.327512] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Refreshing instance network info cache due to event network-changed-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 681.327747] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Acquiring lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.327908] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Acquired lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.328084] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Refreshing network info cache for port 7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.334727] env[69994]: DEBUG oslo_vmware.api [None req-24e14080-4140-4546-afef-f5654edbdf86 tempest-ServersAdminNegativeTestJSON-171297159 tempest-ServersAdminNegativeTestJSON-171297159-project-admin] Task: {'id': task-3241442, 'name': SuspendVM_Task} progress is 62%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.520511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fc4fb2-ee3a-4d9e-bd51-37940326b7a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.528531] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9b579d-e0ef-4627-8f25-fad6dd7ae886 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.573368] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c1a6cc-61e0-48d9-aa88-fa326b58aad3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.583376] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b123b6-27ee-4aea-ae8b-e644c49e03c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.603609] env[69994]: DEBUG nova.compute.provider_tree [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.788022] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241441, 'name': CreateVM_Task, 'duration_secs': 0.907537} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.788406] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 681.789179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.789396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.789829] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 681.790158] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74c2e3dc-9546-45a4-a4f6-4105de0278d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.797536] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 681.797536] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b79f40-4776-3c99-cb4a-dac08dc3e440" [ 681.797536] env[69994]: _type = "Task" [ 681.797536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.814469] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b79f40-4776-3c99-cb4a-dac08dc3e440, 'name': SearchDatastore_Task, 'duration_secs': 0.013772} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.821849] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.822199] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 681.822490] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.822713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.822899] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.823250] env[69994]: DEBUG oslo_vmware.api [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241443, 'name': PowerOnVM_Task, 'duration_secs': 0.968156} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.823498] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e64ca10c-b1bc-4bf4-a64c-2e51dd6c3810 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.825532] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 681.825797] env[69994]: INFO nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Took 10.70 seconds to spawn the instance on the hypervisor. 
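The PowerOnVM_Task entries above trace the usual oslo.vmware request/poll cycle: the driver submits the task, _poll_task reports progress (0%, then 66%), and wait_for_task returns once vCenter marks the task successful, at which point the log records duration_secs. A minimal sketch of that pattern follows, assuming an existing VMwareAPISession and a VirtualMachine managed object reference; the host, credentials, and the vm_ref name are illustrative placeholders, not values taken from this log, and this is not nova's exact code path.

    from oslo_vmware import api

    # Establish a vCenter session (arguments shown are illustrative).
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=3, task_poll_interval=0.5)

    # vm_ref: a VirtualMachine managed object reference obtained elsewhere (assumed).
    # Submit the power-on request; vCenter returns a Task reference immediately.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task completes, polling its state in the meantime;
    # this polling is what produces the "progress is N%" lines seen above.
    session.wait_for_task(task)

The same submit-then-wait_for_task shape applies to the other vCenter tasks in this log (CreateVM_Task, SuspendVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task), differing only in the vim method invoked and its arguments.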
[ 681.825973] env[69994]: DEBUG nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 681.827417] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4ae3df-d867-456d-83b8-a5e7e3cce20e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.831647] env[69994]: DEBUG nova.network.neutron [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.844374] env[69994]: DEBUG oslo_vmware.api [None req-24e14080-4140-4546-afef-f5654edbdf86 tempest-ServersAdminNegativeTestJSON-171297159 tempest-ServersAdminNegativeTestJSON-171297159-project-admin] Task: {'id': task-3241442, 'name': SuspendVM_Task, 'duration_secs': 0.670117} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.845444] env[69994]: DEBUG nova.network.neutron [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.848282] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-24e14080-4140-4546-afef-f5654edbdf86 tempest-ServersAdminNegativeTestJSON-171297159 tempest-ServersAdminNegativeTestJSON-171297159-project-admin] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 681.848538] env[69994]: DEBUG nova.compute.manager [None req-24e14080-4140-4546-afef-f5654edbdf86 tempest-ServersAdminNegativeTestJSON-171297159 tempest-ServersAdminNegativeTestJSON-171297159-project-admin] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 681.850150] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8308b0-9b4c-4362-bdcc-57efded0a9f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.865028] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.865028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.865028] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-568c218f-3344-4ac8-b425-c2b86b50bd78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.878140] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 681.878140] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5230f897-b0a6-1bf3-098a-aa2e5eef6d41" [ 681.878140] env[69994]: _type = "Task" [ 681.878140] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.888227] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5230f897-b0a6-1bf3-098a-aa2e5eef6d41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.975058] env[69994]: DEBUG nova.network.neutron [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.086298] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Updated VIF entry in instance network info cache for port 7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 682.086606] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Updating instance_info_cache with network_info: [{"id": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "address": "fa:16:3e:68:a6:86", "network": {"id": "134b397d-887f-40ba-941f-c04da9756c8f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-692510832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b29d9865d614bbcba48367777782afd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7766d3a6-c9", "ovs_interfaceid": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.110431] env[69994]: DEBUG nova.scheduler.client.report [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.120588] env[69994]: DEBUG nova.network.neutron [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating instance_info_cache with network_info: [{"id": "f2652bdf-bba7-4a73-9045-397e55945ed1", "address": "fa:16:3e:c5:70:d6", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", 
"segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2652bdf-bb", "ovs_interfaceid": "f2652bdf-bba7-4a73-9045-397e55945ed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.371731] env[69994]: INFO nova.compute.manager [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Took 39.65 seconds to build instance. [ 682.389229] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5230f897-b0a6-1bf3-098a-aa2e5eef6d41, 'name': SearchDatastore_Task, 'duration_secs': 0.01584} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.391097] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d6679da-3469-40ae-8367-a36746fcde14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.396730] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 682.396730] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5216b79e-3def-200e-ed22-0a20cedd7cd8" [ 682.396730] env[69994]: _type = "Task" [ 682.396730] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.404705] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5216b79e-3def-200e-ed22-0a20cedd7cd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.478506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Releasing lock "refresh_cache-e9bc15f9-e957-487f-b8d5-d1332b185dcf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.478949] env[69994]: DEBUG nova.compute.manager [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 682.479725] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.480641] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7bb111-4f23-41e2-a732-e96c061e2d77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.488026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.488278] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf0ef930-1af1-4307-9e37-718533f0e604 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.494676] env[69994]: DEBUG oslo_vmware.api [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 682.494676] env[69994]: value = "task-3241444" [ 682.494676] env[69994]: _type = "Task" [ 682.494676] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.502613] env[69994]: DEBUG oslo_vmware.api [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241444, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.596224] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Releasing lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.596531] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Received event network-changed-003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 682.596745] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Refreshing instance network info cache due to event network-changed-003af7d4-a8a5-43d4-b032-96df0b4ae173. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 682.597034] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Acquiring lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.597150] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Acquired lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.597390] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Refreshing network info cache for port 003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 682.614777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.444s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.614948] env[69994]: DEBUG nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 682.620021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.387s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.620021] env[69994]: DEBUG nova.objects.instance [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lazy-loading 'resources' on Instance uuid f109c803-bf37-4845-8956-4336dbc8a946 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 682.625327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.625624] env[69994]: DEBUG nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Instance network_info: |[{"id": "f2652bdf-bba7-4a73-9045-397e55945ed1", "address": "fa:16:3e:c5:70:d6", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2652bdf-bb", "ovs_interfaceid": "f2652bdf-bba7-4a73-9045-397e55945ed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 682.626580] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:70:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721c6720-3ce0-450e-9951-a894f03acc27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2652bdf-bba7-4a73-9045-397e55945ed1', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.635212] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Creating folder: Project (7799f51750bb4c2589042a3b7bc8af01). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.635505] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d56a633e-0b93-4771-b98d-c2021907a81d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.646667] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Created folder: Project (7799f51750bb4c2589042a3b7bc8af01) in parent group-v647729. [ 682.646867] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Creating folder: Instances. Parent ref: group-v647805. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.647134] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dab64bed-66ba-44ca-9d8b-b6955d4079ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.656155] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Created folder: Instances in parent group-v647805. [ 682.656376] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 682.656576] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.656823] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55d29669-e802-4532-8db0-9dd02bffe218 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.676263] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.676263] env[69994]: value = "task-3241447" [ 682.676263] env[69994]: _type = "Task" [ 682.676263] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.685353] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241447, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.875426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-89345096-5efe-4fe8-b6fd-698bb6b2507b tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.148s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.910488] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5216b79e-3def-200e-ed22-0a20cedd7cd8, 'name': SearchDatastore_Task, 'duration_secs': 0.036155} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.910488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.910488] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] cef66a67-e3ac-40dc-a8a4-0375bd64c484/cef66a67-e3ac-40dc-a8a4-0375bd64c484.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 682.910488] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-479376c2-f941-4087-b6a8-0b667ae8c636 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.917021] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 682.917021] env[69994]: value = "task-3241448" [ 682.917021] env[69994]: _type = "Task" [ 682.917021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.924560] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.009107] env[69994]: DEBUG oslo_vmware.api [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241444, 'name': PowerOffVM_Task, 'duration_secs': 0.171251} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.009107] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 683.009752] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 683.010296] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e72947b-2359-417a-98c8-6dfbf9eb4c70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.041029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 683.041029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 683.041029] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleting the datastore file [datastore1] e9bc15f9-e957-487f-b8d5-d1332b185dcf {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.041029] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b4d150b-b22b-467d-8b35-c57a778965d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.047012] env[69994]: DEBUG oslo_vmware.api [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for the task: (returnval){ [ 683.047012] env[69994]: value = "task-3241450" [ 683.047012] env[69994]: _type = "Task" [ 683.047012] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.059130] env[69994]: DEBUG oslo_vmware.api [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.123022] env[69994]: DEBUG nova.compute.utils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 683.124020] env[69994]: DEBUG nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 683.124464] env[69994]: DEBUG nova.network.neutron [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 683.187217] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241447, 'name': CreateVM_Task, 'duration_secs': 0.363316} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.190956] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.196958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.197223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.197617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.197938] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e1d3eb4-b52e-48cb-adab-e1d40ccf51e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.204487] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 683.204487] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5227e9f5-c334-2f6a-fe3c-60dc3e54c308" [ 683.204487] env[69994]: _type = "Task" [ 683.204487] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.216277] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5227e9f5-c334-2f6a-fe3c-60dc3e54c308, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.218924] env[69994]: DEBUG nova.policy [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d38df417e9f44d39a794ea2ad0ce29d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eea3a16a357a4c818ee5b3b69c9149cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 683.378927] env[69994]: DEBUG nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 683.429046] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241448, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.526352] env[69994]: DEBUG nova.compute.manager [None req-f70e41d2-49f1-419d-aeca-723da3880665 tempest-ServerExternalEventsTest-386695307 tempest-ServerExternalEventsTest-386695307-project] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Received event network-changed {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 683.526352] env[69994]: DEBUG nova.compute.manager [None req-f70e41d2-49f1-419d-aeca-723da3880665 tempest-ServerExternalEventsTest-386695307 tempest-ServerExternalEventsTest-386695307-project] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Refreshing instance network info cache due to event network-changed. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 683.526472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f70e41d2-49f1-419d-aeca-723da3880665 tempest-ServerExternalEventsTest-386695307 tempest-ServerExternalEventsTest-386695307-project] Acquiring lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.526557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f70e41d2-49f1-419d-aeca-723da3880665 tempest-ServerExternalEventsTest-386695307 tempest-ServerExternalEventsTest-386695307-project] Acquired lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.526714] env[69994]: DEBUG nova.network.neutron [None req-f70e41d2-49f1-419d-aeca-723da3880665 tempest-ServerExternalEventsTest-386695307 tempest-ServerExternalEventsTest-386695307-project] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.557179] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updated VIF entry in instance network info cache for port 003af7d4-a8a5-43d4-b032-96df0b4ae173. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 683.557179] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.562568] env[69994]: DEBUG oslo_vmware.api [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Task: {'id': task-3241450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108082} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.563337] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.563631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 683.563901] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.564230] env[69994]: INFO nova.compute.manager [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Took 1.09 seconds to destroy the instance on the hypervisor. [ 683.564576] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.564852] env[69994]: DEBUG nova.compute.manager [-] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 683.565040] env[69994]: DEBUG nova.network.neutron [-] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 683.585627] env[69994]: DEBUG nova.network.neutron [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Successfully created port: e91dc2f6-726a-485a-9736-6de1fad4e0ae {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.590991] env[69994]: DEBUG nova.network.neutron [-] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.639198] env[69994]: DEBUG nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.716743] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5227e9f5-c334-2f6a-fe3c-60dc3e54c308, 'name': SearchDatastore_Task, 'duration_secs': 0.062643} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.717048] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.717286] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.717542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.717691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.717866] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.718129] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50efaae9-2285-444c-b434-7038283a6ebf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.726978] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.728134] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 683.731448] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bed47a94-7e7a-4112-899d-8c5f4d9a6275 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.738660] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 683.738660] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a73c90-d695-d114-4894-852415e507aa" [ 683.738660] env[69994]: _type = "Task" [ 683.738660] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.747784] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a73c90-d695-d114-4894-852415e507aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.831991] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6cdaf6-ff5a-499c-a502-a52ff94c620b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.840824] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508e6947-3756-4114-8e22-197e9e3613b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.870662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d98a28-0286-4a7f-bf10-f7caf351a5ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.878502] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29799683-7d51-423f-944c-9ee2061c5e34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.894933] env[69994]: DEBUG nova.compute.provider_tree [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.907028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.928442] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615854} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.928726] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] cef66a67-e3ac-40dc-a8a4-0375bd64c484/cef66a67-e3ac-40dc-a8a4-0375bd64c484.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.928932] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.929191] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b032325-ec81-4359-9791-67ead36ef227 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.935144] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 683.935144] env[69994]: value = "task-3241451" [ 683.935144] env[69994]: _type = "Task" [ 683.935144] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.944129] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241451, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.064281] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Releasing lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.064556] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Received event network-vif-plugged-f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.064750] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.064951] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.065131] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.065329] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] No waiting events found dispatching network-vif-plugged-f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 684.065510] env[69994]: WARNING nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Received unexpected event network-vif-plugged-f2652bdf-bba7-4a73-9045-397e55945ed1 for instance with vm_state building and task_state spawning. [ 684.065679] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Received event network-changed-f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.065832] env[69994]: DEBUG nova.compute.manager [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Refreshing instance network info cache due to event network-changed-f2652bdf-bba7-4a73-9045-397e55945ed1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 684.066013] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Acquiring lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.066150] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Acquired lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.066304] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Refreshing network info cache for port f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.095792] env[69994]: DEBUG nova.network.neutron [-] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.248438] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a73c90-d695-d114-4894-852415e507aa, 'name': SearchDatastore_Task, 'duration_secs': 0.009502} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.249340] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ee058d7-675b-4b9d-b277-6a071948a3b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.256096] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 684.256096] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52422124-a074-29db-22b0-679cd04965ca" [ 684.256096] env[69994]: _type = "Task" [ 684.256096] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.264947] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52422124-a074-29db-22b0-679cd04965ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.349493] env[69994]: DEBUG nova.network.neutron [None req-f70e41d2-49f1-419d-aeca-723da3880665 tempest-ServerExternalEventsTest-386695307 tempest-ServerExternalEventsTest-386695307-project] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Updating instance_info_cache with network_info: [{"id": "645be6ca-7ed3-4e18-affb-c05101e3a42f", "address": "fa:16:3e:4d:e5:07", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap645be6ca-7e", "ovs_interfaceid": "645be6ca-7ed3-4e18-affb-c05101e3a42f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.430624] env[69994]: ERROR nova.scheduler.client.report [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] [req-c4f55aa6-4a39-4ce6-b966-5eb416926f06] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c4f55aa6-4a39-4ce6-b966-5eb416926f06"}]} [ 684.449039] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063697} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.449039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 684.449039] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f1f1eb-f23f-42d8-b90c-5f385f1efdd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.454715] env[69994]: DEBUG nova.scheduler.client.report [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 684.477114] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] cef66a67-e3ac-40dc-a8a4-0375bd64c484/cef66a67-e3ac-40dc-a8a4-0375bd64c484.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 684.478459] env[69994]: DEBUG nova.scheduler.client.report [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 684.478726] env[69994]: DEBUG nova.compute.provider_tree [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 684.480861] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fa5617e-73b3-4a62-b9ac-22988e8646ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.496358] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.496489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.496598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.496851] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.496991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.500093] env[69994]: INFO nova.compute.manager [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Terminating instance [ 684.505692] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 684.505692] env[69994]: value = "task-3241452" [ 684.505692] env[69994]: _type = "Task" [ 684.505692] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.509536] env[69994]: DEBUG nova.scheduler.client.report [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 684.518037] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241452, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.543160] env[69994]: DEBUG nova.scheduler.client.report [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 684.602371] env[69994]: INFO nova.compute.manager [-] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Took 1.04 seconds to deallocate network for instance. [ 684.658018] env[69994]: DEBUG nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.711308] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.711630] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.711782] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.714274] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.714274] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.714274] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.714274] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.714274] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
684.714809] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.714809] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.714809] env[69994]: DEBUG nova.virt.hardware [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.715140] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd697063-1e8a-4ade-8e40-b57daa6327ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.731995] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9fee46-ecef-4610-bdcb-f99e427168a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.767529] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52422124-a074-29db-22b0-679cd04965ca, 'name': SearchDatastore_Task, 'duration_secs': 0.011207} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.770446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.770744] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24/7f66a148-86fe-4ddc-b8ed-6e6a306bbc24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.771237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f10dad26-1627-4283-9fe7-95ea459848f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.778365] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 684.778365] env[69994]: value = "task-3241453" [ 684.778365] env[69994]: _type = "Task" [ 684.778365] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.789941] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.854837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f70e41d2-49f1-419d-aeca-723da3880665 tempest-ServerExternalEventsTest-386695307 tempest-ServerExternalEventsTest-386695307-project] Releasing lock "refresh_cache-84bff4c0-9e2e-47f2-a378-70d3c992b58b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.969050] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updated VIF entry in instance network info cache for port f2652bdf-bba7-4a73-9045-397e55945ed1. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 684.969050] env[69994]: DEBUG nova.network.neutron [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating instance_info_cache with network_info: [{"id": "f2652bdf-bba7-4a73-9045-397e55945ed1", "address": "fa:16:3e:c5:70:d6", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2652bdf-bb", "ovs_interfaceid": "f2652bdf-bba7-4a73-9045-397e55945ed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.006102] env[69994]: DEBUG nova.compute.manager [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 685.006315] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 685.008127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4485a6b8-955b-4a27-aea5-54be47fd496a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.028175] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241452, 'name': ReconfigVM_Task, 'duration_secs': 0.461923} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.030760] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Reconfigured VM instance instance-00000018 to attach disk [datastore2] cef66a67-e3ac-40dc-a8a4-0375bd64c484/cef66a67-e3ac-40dc-a8a4-0375bd64c484.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 685.031605] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 685.032121] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99eb4f72-a2b3-4e4f-b3b4-50a3067fc515 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.034123] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdab6162-8239-489c-9332-dbefda0936d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.042851] env[69994]: DEBUG oslo_vmware.api [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 685.042851] env[69994]: value = "task-3241455" [ 685.042851] env[69994]: _type = "Task" [ 685.042851] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.043297] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 685.043297] env[69994]: value = "task-3241454" [ 685.043297] env[69994]: _type = "Task" [ 685.043297] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.064650] env[69994]: DEBUG oslo_vmware.api [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.067237] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241454, 'name': Rename_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.115432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.172894] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1f0a-d2f6-eecc-661b-06c45af60b4d/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 685.173870] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c232e3d8-928f-49ae-b380-f5d33cf24308 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.184571] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1f0a-d2f6-eecc-661b-06c45af60b4d/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 685.184780] env[69994]: ERROR oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1f0a-d2f6-eecc-661b-06c45af60b4d/disk-0.vmdk due to incomplete transfer. [ 685.185047] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-21ad5072-ac42-4314-a8f5-138cca9fa7c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.194518] env[69994]: DEBUG oslo_vmware.rw_handles [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1f0a-d2f6-eecc-661b-06c45af60b4d/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 685.194702] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Uploaded image b0eeb296-ea9a-4b6e-b148-a29401cca7af to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 685.197253] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 685.200425] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-46f45851-d43e-483b-a8dc-cb740a3ffbce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.206881] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 685.206881] env[69994]: value = "task-3241456" [ 685.206881] env[69994]: _type = "Task" [ 685.206881] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.222461] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241456, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.257637] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c6a8d4-54c7-4ac0-8655-83bb3e2addf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.268381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73e9335-4e50-4a34-9b3a-9c4fcc1bbeff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.310441] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6849cf-da0a-49b0-8790-89b5535f6ccf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.314048] env[69994]: DEBUG nova.network.neutron [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Successfully updated port: e91dc2f6-726a-485a-9736-6de1fad4e0ae {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 685.320794] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241453, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.324760] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d706266-0976-4440-a911-844bffc07be1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.341446] env[69994]: DEBUG nova.compute.provider_tree [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.428827] env[69994]: DEBUG nova.compute.manager [req-c30963f2-7164-4dc5-8b24-e6012fc36dde req-44a9fc71-a4e3-4015-b4fc-df09415376d9 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Received event network-vif-plugged-e91dc2f6-726a-485a-9736-6de1fad4e0ae {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.429147] env[69994]: DEBUG oslo_concurrency.lockutils [req-c30963f2-7164-4dc5-8b24-e6012fc36dde req-44a9fc71-a4e3-4015-b4fc-df09415376d9 service nova] Acquiring lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.429400] env[69994]: DEBUG oslo_concurrency.lockutils [req-c30963f2-7164-4dc5-8b24-e6012fc36dde req-44a9fc71-a4e3-4015-b4fc-df09415376d9 service nova] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.429609] env[69994]: DEBUG oslo_concurrency.lockutils [req-c30963f2-7164-4dc5-8b24-e6012fc36dde req-44a9fc71-a4e3-4015-b4fc-df09415376d9 service nova] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.429871] env[69994]: DEBUG nova.compute.manager [req-c30963f2-7164-4dc5-8b24-e6012fc36dde req-44a9fc71-a4e3-4015-b4fc-df09415376d9 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] No waiting events found dispatching network-vif-plugged-e91dc2f6-726a-485a-9736-6de1fad4e0ae {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 685.430120] env[69994]: WARNING nova.compute.manager [req-c30963f2-7164-4dc5-8b24-e6012fc36dde req-44a9fc71-a4e3-4015-b4fc-df09415376d9 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Received unexpected event network-vif-plugged-e91dc2f6-726a-485a-9736-6de1fad4e0ae for instance with vm_state building and task_state spawning. 
[ 685.472412] env[69994]: DEBUG oslo_concurrency.lockutils [req-7aea4cb9-32a0-4f76-ad94-d30d51ebcf50 req-f42d7f2e-20a7-4d8f-88f6-2246654dce18 service nova] Releasing lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.556880] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241454, 'name': Rename_Task, 'duration_secs': 0.382119} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.560009] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 685.560309] env[69994]: DEBUG oslo_vmware.api [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241455, 'name': PowerOffVM_Task, 'duration_secs': 0.451134} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.560884] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1222eba4-1f27-49ad-a733-ca7c710e3afe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.562488] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 685.563697] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 685.563697] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93340329-3ae8-4b70-93a7-3e38c088b2c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.569824] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 685.569824] env[69994]: value = "task-3241457" [ 685.569824] env[69994]: _type = "Task" [ 685.569824] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.577940] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241457, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.629187] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 685.629364] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 685.629592] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Deleting the datastore file [datastore2] 84bff4c0-9e2e-47f2-a378-70d3c992b58b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.629880] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db0e9411-e771-4b77-9c7d-822f9002e018 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.636572] env[69994]: DEBUG oslo_vmware.api [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for the task: (returnval){ [ 685.636572] env[69994]: value = "task-3241459" [ 685.636572] env[69994]: _type = "Task" [ 685.636572] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.645386] env[69994]: DEBUG oslo_vmware.api [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.716958] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241456, 'name': Destroy_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.817299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "refresh_cache-6e8286a3-6fd1-44ee-a5ca-b21f3178334d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.817507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired lock "refresh_cache-6e8286a3-6fd1-44ee-a5ca-b21f3178334d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.817658] env[69994]: DEBUG nova.network.neutron [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.818808] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241453, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579818} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.819255] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24/7f66a148-86fe-4ddc-b8ed-6e6a306bbc24.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.819463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.819707] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d3eadce-d759-43db-acef-093ab95c8b19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.825389] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 685.825389] env[69994]: value = "task-3241460" [ 685.825389] env[69994]: _type = "Task" [ 685.825389] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.836748] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241460, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.895419] env[69994]: DEBUG nova.scheduler.client.report [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 52 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 685.895703] env[69994]: DEBUG nova.compute.provider_tree [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 52 to 53 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 685.895883] env[69994]: DEBUG nova.compute.provider_tree [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 686.080667] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241457, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.145407] env[69994]: DEBUG oslo_vmware.api [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Task: {'id': task-3241459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173816} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.146736] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 686.146736] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 686.146736] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.146736] env[69994]: INFO nova.compute.manager [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 686.146917] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 686.146990] env[69994]: DEBUG nova.compute.manager [-] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 686.147107] env[69994]: DEBUG nova.network.neutron [-] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.217959] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241456, 'name': Destroy_Task, 'duration_secs': 0.902127} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.218273] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Destroyed the VM [ 686.218511] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 686.218767] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-023ee332-0384-4429-93b5-50dd77d13c67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.225449] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 686.225449] env[69994]: value = "task-3241461" [ 686.225449] env[69994]: _type = "Task" [ 686.225449] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.233362] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241461, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.335376] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241460, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080747} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.335883] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.336383] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca7de29-19f9-4f6f-9732-4f508d6f55ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.359120] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24/7f66a148-86fe-4ddc-b8ed-6e6a306bbc24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.359326] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-525e8939-f627-42da-8391-ba326bc34abc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.374102] env[69994]: DEBUG nova.network.neutron [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.382548] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 686.382548] env[69994]: value = "task-3241462" [ 686.382548] env[69994]: _type = "Task" [ 686.382548] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.390667] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241462, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.401642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.782s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.404116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.542s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.405647] env[69994]: INFO nova.compute.claims [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.477271] env[69994]: INFO nova.scheduler.client.report [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Deleted allocations for instance f109c803-bf37-4845-8956-4336dbc8a946 [ 686.583477] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241457, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.661956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "ad957c30-c923-4bbf-8841-00e99de44781" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.662437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "ad957c30-c923-4bbf-8841-00e99de44781" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.662789] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "ad957c30-c923-4bbf-8841-00e99de44781-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.663636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "ad957c30-c923-4bbf-8841-00e99de44781-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.663636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "ad957c30-c923-4bbf-8841-00e99de44781-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.668073] env[69994]: INFO nova.compute.manager [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Terminating instance [ 686.673691] env[69994]: DEBUG nova.network.neutron [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Updating instance_info_cache with network_info: [{"id": "e91dc2f6-726a-485a-9736-6de1fad4e0ae", "address": "fa:16:3e:76:28:9a", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91dc2f6-72", "ovs_interfaceid": "e91dc2f6-726a-485a-9736-6de1fad4e0ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.735733] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241461, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.893107] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241462, 'name': ReconfigVM_Task, 'duration_secs': 0.277368} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.893395] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24/7f66a148-86fe-4ddc-b8ed-6e6a306bbc24.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.894028] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-091930ee-1012-420e-92d5-e06800703777 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.900857] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 686.900857] env[69994]: value = "task-3241463" [ 686.900857] env[69994]: _type = "Task" [ 686.900857] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.910550] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241463, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.932097] env[69994]: DEBUG nova.network.neutron [-] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.985929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9bb546d1-f76d-4b93-b94a-2145ef4abea1 tempest-ServerAddressesNegativeTestJSON-1804165610 tempest-ServerAddressesNegativeTestJSON-1804165610-project-member] Lock "f109c803-bf37-4845-8956-4336dbc8a946" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.493s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.086505] env[69994]: DEBUG oslo_vmware.api [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241457, 'name': PowerOnVM_Task, 'duration_secs': 1.229595} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.086797] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.087095] env[69994]: INFO nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Took 12.01 seconds to spawn the instance on the hypervisor. [ 687.087427] env[69994]: DEBUG nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.088580] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46b5945-e976-4dc0-b347-aa815422acac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.173452] env[69994]: DEBUG nova.compute.manager [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 687.173578] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 687.174590] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b045411-a257-4ddb-90bf-c6ee47b2bfd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.178702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Releasing lock "refresh_cache-6e8286a3-6fd1-44ee-a5ca-b21f3178334d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.179050] env[69994]: DEBUG nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Instance network_info: |[{"id": "e91dc2f6-726a-485a-9736-6de1fad4e0ae", "address": "fa:16:3e:76:28:9a", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91dc2f6-72", "ovs_interfaceid": "e91dc2f6-726a-485a-9736-6de1fad4e0ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 687.179477] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:28:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e91dc2f6-726a-485a-9736-6de1fad4e0ae', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.188255] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm 
to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.190263] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.190354] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11dd78ca-8f57-4f17-b550-9e0a71deb8a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.209052] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 687.209824] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-274224dc-d086-4a4c-b557-5c6316649c3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.214279] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.214279] env[69994]: value = "task-3241464" [ 687.214279] env[69994]: _type = "Task" [ 687.214279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.223821] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241464, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.236518] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241461, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.282167] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 687.282414] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 687.282641] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleting the datastore file [datastore1] ad957c30-c923-4bbf-8841-00e99de44781 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 687.282908] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ae0ee77-4106-4acb-8ce4-e32ca919766c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.290084] env[69994]: DEBUG oslo_vmware.api [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 687.290084] env[69994]: value = "task-3241466" [ 687.290084] env[69994]: _type = "Task" [ 687.290084] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.306668] env[69994]: DEBUG oslo_vmware.api [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.412632] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241463, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.435031] env[69994]: INFO nova.compute.manager [-] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Took 1.29 seconds to deallocate network for instance. [ 687.591292] env[69994]: DEBUG nova.compute.manager [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Received event network-changed-e91dc2f6-726a-485a-9736-6de1fad4e0ae {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 687.594026] env[69994]: DEBUG nova.compute.manager [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Refreshing instance network info cache due to event network-changed-e91dc2f6-726a-485a-9736-6de1fad4e0ae. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 687.594026] env[69994]: DEBUG oslo_concurrency.lockutils [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] Acquiring lock "refresh_cache-6e8286a3-6fd1-44ee-a5ca-b21f3178334d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.594026] env[69994]: DEBUG oslo_concurrency.lockutils [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] Acquired lock "refresh_cache-6e8286a3-6fd1-44ee-a5ca-b21f3178334d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.594026] env[69994]: DEBUG nova.network.neutron [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Refreshing network info cache for port e91dc2f6-726a-485a-9736-6de1fad4e0ae {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.620978] env[69994]: INFO nova.compute.manager [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Took 38.61 seconds to build instance. [ 687.732689] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241464, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.740676] env[69994]: DEBUG oslo_vmware.api [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241461, 'name': RemoveSnapshot_Task, 'duration_secs': 1.087} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.744023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 687.744023] env[69994]: INFO nova.compute.manager [None req-c6b8374a-6207-4a96-918f-a84249f75880 tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Took 18.06 seconds to snapshot the instance on the hypervisor. [ 687.805256] env[69994]: DEBUG oslo_vmware.api [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.477913} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.805256] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.805256] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 687.805256] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.805256] env[69994]: INFO nova.compute.manager [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Took 0.63 seconds to destroy the instance on the hypervisor. [ 687.805465] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.807312] env[69994]: DEBUG nova.compute.manager [-] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 687.807588] env[69994]: DEBUG nova.network.neutron [-] [instance: ad957c30-c923-4bbf-8841-00e99de44781] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 687.915771] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241463, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.950200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.079652] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e8ecd1-8522-41a7-80d6-f9651010ce32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.096406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f1b512-a904-4e8c-a8a9-06af2a0b23ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.127836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d9e986ea-5ac9-4995-8374-fb5c664298bf tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.883s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.131428] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f112163-ac47-4115-ad01-680ad3a9ab6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.140598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e1fafd-f008-46a6-8ed1-0872afb8e349 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.158594] env[69994]: DEBUG nova.compute.provider_tree [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.228478] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241464, 'name': CreateVM_Task, 'duration_secs': 0.770852} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.228478] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.232719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.232719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.232719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 688.232719] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7065ece-539e-44ff-8aef-058d1a1149cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.238868] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 688.238868] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bf152b-aa4c-3d72-c4ae-7df84bb5f2cb" [ 688.238868] env[69994]: _type = "Task" [ 688.238868] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.248617] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bf152b-aa4c-3d72-c4ae-7df84bb5f2cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.415019] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241463, 'name': Rename_Task, 'duration_secs': 1.338469} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.415019] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.415019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8de78c7d-1d21-4e74-9104-18fc3b2511c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.424072] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 688.424072] env[69994]: value = "task-3241467" [ 688.424072] env[69994]: _type = "Task" [ 688.424072] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.434236] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.499794] env[69994]: DEBUG nova.compute.manager [req-26d856bb-067c-4ce4-b4ba-1f1d0633f42f req-bcbbd80d-f520-4669-a696-19c9df2497c2 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Received event network-vif-deleted-68ff6b46-787c-4682-8626-0a9c421f1cd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 688.499897] env[69994]: INFO nova.compute.manager [req-26d856bb-067c-4ce4-b4ba-1f1d0633f42f req-bcbbd80d-f520-4669-a696-19c9df2497c2 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Neutron deleted interface 68ff6b46-787c-4682-8626-0a9c421f1cd7; detaching it from the instance and deleting it from the info cache [ 688.500084] env[69994]: DEBUG nova.network.neutron [req-26d856bb-067c-4ce4-b4ba-1f1d0633f42f req-bcbbd80d-f520-4669-a696-19c9df2497c2 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.613843] env[69994]: DEBUG nova.network.neutron [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Updated VIF entry in instance network info cache for port e91dc2f6-726a-485a-9736-6de1fad4e0ae. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 688.614248] env[69994]: DEBUG nova.network.neutron [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Updating instance_info_cache with network_info: [{"id": "e91dc2f6-726a-485a-9736-6de1fad4e0ae", "address": "fa:16:3e:76:28:9a", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape91dc2f6-72", "ovs_interfaceid": "e91dc2f6-726a-485a-9736-6de1fad4e0ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.637031] env[69994]: DEBUG nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 688.661739] env[69994]: DEBUG nova.scheduler.client.report [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.722407] env[69994]: DEBUG nova.network.neutron [-] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.750512] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bf152b-aa4c-3d72-c4ae-7df84bb5f2cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010065} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.752204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.752204] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.752204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.752204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.752838] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 688.752838] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c678622-8ea5-4026-8f17-ffd9b58104b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.763853] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 688.764097] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 688.765194] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7432589-10cd-42c7-990b-5bea70d190df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.770761] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 688.770761] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527926b3-ba5d-eefe-2a8d-778c0928edf6" [ 688.770761] env[69994]: _type = "Task" [ 688.770761] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.779044] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527926b3-ba5d-eefe-2a8d-778c0928edf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.937590] env[69994]: DEBUG oslo_vmware.api [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241467, 'name': PowerOnVM_Task, 'duration_secs': 0.48764} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.937881] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 688.945021] env[69994]: INFO nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Took 11.03 seconds to spawn the instance on the hypervisor. 
[ 688.945021] env[69994]: DEBUG nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 688.945021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb3a61b-107d-49f9-869d-06f86133fce6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.004309] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bf7a743-f9da-445d-89e6-416a053f5da0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.017660] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f95cf31-6346-494a-ad13-e5c0ec881b47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.045878] env[69994]: DEBUG nova.compute.manager [req-26d856bb-067c-4ce4-b4ba-1f1d0633f42f req-bcbbd80d-f520-4669-a696-19c9df2497c2 service nova] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Detach interface failed, port_id=68ff6b46-787c-4682-8626-0a9c421f1cd7, reason: Instance ad957c30-c923-4bbf-8841-00e99de44781 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 689.118515] env[69994]: DEBUG oslo_concurrency.lockutils [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] Releasing lock "refresh_cache-6e8286a3-6fd1-44ee-a5ca-b21f3178334d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.118515] env[69994]: DEBUG nova.compute.manager [req-4bbb8d81-44b6-4de9-bd1d-d765a1b8d325 req-66f3ca5a-f6d4-496f-8023-466b8a9fd777 service nova] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Received event network-vif-deleted-645be6ca-7ed3-4e18-affb-c05101e3a42f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.176423] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.772s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.176838] env[69994]: DEBUG nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 689.180393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.655s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.182038] env[69994]: INFO nova.compute.claims [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 689.185842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.225985] env[69994]: INFO nova.compute.manager [-] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Took 1.42 seconds to deallocate network for instance. [ 689.284277] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527926b3-ba5d-eefe-2a8d-778c0928edf6, 'name': SearchDatastore_Task, 'duration_secs': 0.009169} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.287306] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6afff4f-ff5d-4cd5-9aae-13342999ac94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.291094] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 689.291094] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522dfa6b-80f7-2574-00bc-29657e4262a1" [ 689.291094] env[69994]: _type = "Task" [ 689.291094] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.299873] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522dfa6b-80f7-2574-00bc-29657e4262a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.467558] env[69994]: INFO nova.compute.manager [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Took 37.75 seconds to build instance. 
[ 689.686613] env[69994]: DEBUG nova.compute.utils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.688900] env[69994]: DEBUG nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.688900] env[69994]: DEBUG nova.network.neutron [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.733447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.772148] env[69994]: DEBUG nova.policy [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f0060201d794c97a2a8b70ab1ce4ef3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b24523ecd9423d84fa86385f970562', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.804754] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522dfa6b-80f7-2574-00bc-29657e4262a1, 'name': SearchDatastore_Task, 'duration_secs': 0.010314} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.805227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.805648] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 6e8286a3-6fd1-44ee-a5ca-b21f3178334d/6e8286a3-6fd1-44ee-a5ca-b21f3178334d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 689.806057] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b23cf0dc-dcf6-44d3-bd58-3750661e0552 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.817036] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 689.817036] env[69994]: value = "task-3241468" [ 689.817036] env[69994]: _type = "Task" [ 689.817036] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.824636] env[69994]: DEBUG nova.compute.manager [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Received event network-changed-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.825512] env[69994]: DEBUG nova.compute.manager [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Refreshing instance network info cache due to event network-changed-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 689.831305] env[69994]: DEBUG oslo_concurrency.lockutils [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] Acquiring lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.831305] env[69994]: DEBUG oslo_concurrency.lockutils [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] Acquired lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.831305] env[69994]: DEBUG nova.network.neutron [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Refreshing network info cache for port 7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.836216] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241468, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.972533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ad3d4510-f26f-4911-816f-67597ca983e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.074s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.193705] env[69994]: DEBUG nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 690.327842] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241468, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501314} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.327842] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 6e8286a3-6fd1-44ee-a5ca-b21f3178334d/6e8286a3-6fd1-44ee-a5ca-b21f3178334d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 690.327842] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 690.328115] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-786a21c0-8df4-4cc4-bd9b-920b92545abb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.337435] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 690.337435] env[69994]: value = "task-3241469" [ 690.337435] env[69994]: _type = "Task" [ 690.337435] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.351655] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241469, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.477803] env[69994]: DEBUG nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 690.796684] env[69994]: DEBUG nova.network.neutron [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Updated VIF entry in instance network info cache for port 7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.796684] env[69994]: DEBUG nova.network.neutron [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Updating instance_info_cache with network_info: [{"id": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "address": "fa:16:3e:68:a6:86", "network": {"id": "134b397d-887f-40ba-941f-c04da9756c8f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-692510832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b29d9865d614bbcba48367777782afd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7766d3a6-c9", "ovs_interfaceid": "7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.851350] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241469, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160032} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.853172] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 690.856299] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e8c590-4bca-45f3-a1c8-442c28502c6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.880612] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 6e8286a3-6fd1-44ee-a5ca-b21f3178334d/6e8286a3-6fd1-44ee-a5ca-b21f3178334d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 690.887446] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58162626-e2bb-440d-8728-e73e0d78f67c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.906130] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 690.906130] env[69994]: value = "task-3241470" [ 690.906130] env[69994]: _type = "Task" [ 690.906130] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.919024] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241470, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.941018] env[69994]: DEBUG nova.network.neutron [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Successfully created port: e90f5624-00b3-4a23-bedc-663e89b666df {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.945463] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f946fe43-280f-417e-8a71-63566074e075 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.954647] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894764c6-2ac3-43a9-b035-a3696fda3228 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.992598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b2da01-806a-42ce-8b9b-48412f34f6fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.003498] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e193035-7fb5-48d0-9d82-87d179293601 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.016469] env[69994]: DEBUG nova.compute.provider_tree [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.019385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.209766] env[69994]: DEBUG nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 691.248746] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 691.249077] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.249763] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 691.249969] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.250202] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 691.250397] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 691.250652] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 691.250851] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 691.251069] env[69994]: DEBUG nova.virt.hardware [None 
req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 691.251379] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 691.251539] env[69994]: DEBUG nova.virt.hardware [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 691.252456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc666c76-e5e5-437e-81e2-f6f22d3f0db4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.260381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b836fbf1-74b2-49b3-b5d3-4dbae3670126 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.409146] env[69994]: DEBUG oslo_concurrency.lockutils [req-3d7e7454-6303-4a5a-9594-0d13d12d6127 req-c00b9c9a-f08d-4f65-b261-9cb154be8297 service nova] Releasing lock "refresh_cache-cef66a67-e3ac-40dc-a8a4-0375bd64c484" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.416672] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241470, 'name': ReconfigVM_Task, 'duration_secs': 0.294724} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.416946] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 6e8286a3-6fd1-44ee-a5ca-b21f3178334d/6e8286a3-6fd1-44ee-a5ca-b21f3178334d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.417688] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc5e2b84-7391-4505-86c1-1c6c0318af9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.424663] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 691.424663] env[69994]: value = "task-3241471" [ 691.424663] env[69994]: _type = "Task" [ 691.424663] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.436289] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241471, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.520105] env[69994]: DEBUG nova.scheduler.client.report [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.838635] env[69994]: DEBUG nova.compute.manager [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.839665] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559b7a0a-a982-4919-9376-519703183971 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.855080] env[69994]: DEBUG nova.compute.manager [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Received event network-changed-f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.855389] env[69994]: DEBUG nova.compute.manager [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Refreshing instance network info cache due to event network-changed-f2652bdf-bba7-4a73-9045-397e55945ed1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 691.855741] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] Acquiring lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.855910] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] Acquired lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.856138] env[69994]: DEBUG nova.network.neutron [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Refreshing network info cache for port f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 691.934536] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241471, 'name': Rename_Task, 'duration_secs': 0.137878} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.934841] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 691.935125] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec07b712-e1bd-4ef0-90ce-a1b59063e047 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.941945] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 691.941945] env[69994]: value = "task-3241472" [ 691.941945] env[69994]: _type = "Task" [ 691.941945] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.949999] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241472, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.026329] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.846s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.026858] env[69994]: DEBUG nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 692.029802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.405s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.030601] env[69994]: DEBUG nova.objects.instance [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lazy-loading 'resources' on Instance uuid 2f710439-0216-401e-9759-af584f9bd00d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 692.352019] env[69994]: INFO nova.compute.manager [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] instance snapshotting [ 692.358334] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c949639-a0ee-42b9-b60e-f5ba0ae76439 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.383125] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3cb652-8812-4c90-9486-cd0746f280df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.454413] env[69994]: DEBUG oslo_vmware.api [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241472, 'name': PowerOnVM_Task, 'duration_secs': 0.466339} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.455616] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 692.455829] env[69994]: INFO nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Took 7.80 seconds to spawn the instance on the hypervisor. [ 692.456015] env[69994]: DEBUG nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 692.456828] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596b23af-c59f-42bb-a994-ab1cf524e702 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.533527] env[69994]: DEBUG nova.compute.utils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 692.537961] env[69994]: DEBUG nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 692.538158] env[69994]: DEBUG nova.network.neutron [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 692.718021] env[69994]: DEBUG nova.policy [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9cc52f7cc9e545e68464a382a2d5198e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d5dcd4c69bb4d6db17dcfa3a7ace979', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 692.894800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 692.899623] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e8180858-3a64-4891-b92d-806b93c840f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.913565] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 692.913565] env[69994]: value = "task-3241473" [ 692.913565] env[69994]: _type = "Task" [ 692.913565] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.921616] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241473, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.990321] env[69994]: INFO nova.compute.manager [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Took 39.29 seconds to build instance. 
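The Task exchanges above ("Waiting for the task", "progress is 0%", "completed successfully", duration_secs) are driven by oslo.vmware's session API polling vCenter tasks. A hedged sketch of the caller side, assuming an already-created VMwareAPISession and a VM managed-object reference supplied by the caller (both placeholders, not values taken from this log):

from oslo_vmware import exceptions as vmware_exceptions


def power_on(session, vm_ref):
    """Start a VM and block until vCenter reports the task as finished."""
    # invoke_api issues PowerOnVM_Task against the vSphere API and returns a
    # Task managed object; wait_for_task then polls it on the session's
    # task_poll_interval, which is what produces the _poll_task progress lines.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    try:
        session.wait_for_task(task)
    except vmware_exceptions.VimException:
        # vCenter faults surface here rather than being silently dropped.
        raise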
[ 692.995443] env[69994]: DEBUG nova.network.neutron [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Successfully updated port: e90f5624-00b3-4a23-bedc-663e89b666df {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 693.039465] env[69994]: DEBUG nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 693.116914] env[69994]: DEBUG nova.network.neutron [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updated VIF entry in instance network info cache for port f2652bdf-bba7-4a73-9045-397e55945ed1. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 693.116914] env[69994]: DEBUG nova.network.neutron [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating instance_info_cache with network_info: [{"id": "f2652bdf-bba7-4a73-9045-397e55945ed1", "address": "fa:16:3e:c5:70:d6", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2652bdf-bb", "ovs_interfaceid": "f2652bdf-bba7-4a73-9045-397e55945ed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.183645] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13108b9-baf8-432f-853b-6b549e923ad0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.194287] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3f5093-9a4e-47b8-8424-911ed7bdecfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.232951] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae47f96-d014-4998-a5ae-dbacb1674d1d {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.245232] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86818cda-ad8c-4f7c-b019-b1a84ddd3e94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.258775] env[69994]: DEBUG nova.compute.provider_tree [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.425273] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241473, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.428798] env[69994]: DEBUG nova.network.neutron [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Successfully created port: 150df766-08da-4092-b1e4-2e9c7a7cbf76 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 693.492602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-961e398f-9ac3-40cc-b505-d7710e029abe tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.765s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.501782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "refresh_cache-0bfe4393-5b2a-487f-ba7a-858ed4c861a5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.501782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquired lock "refresh_cache-0bfe4393-5b2a-487f-ba7a-858ed4c861a5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.501782] env[69994]: DEBUG nova.network.neutron [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 693.617799] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a284389-1bd7-487e-8091-d19c37d30732 req-3762c35a-42f1-4127-8b88-0f71e3c7e2d0 service nova] Releasing lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.763615] env[69994]: DEBUG nova.scheduler.client.report [None 
req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 693.895427] env[69994]: DEBUG nova.compute.manager [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Received event network-vif-plugged-e90f5624-00b3-4a23-bedc-663e89b666df {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 693.895427] env[69994]: DEBUG oslo_concurrency.lockutils [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] Acquiring lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.895718] env[69994]: DEBUG oslo_concurrency.lockutils [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.895718] env[69994]: DEBUG oslo_concurrency.lockutils [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.895786] env[69994]: DEBUG nova.compute.manager [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] No waiting events found dispatching network-vif-plugged-e90f5624-00b3-4a23-bedc-663e89b666df {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 693.895946] env[69994]: WARNING nova.compute.manager [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Received unexpected event network-vif-plugged-e90f5624-00b3-4a23-bedc-663e89b666df for instance with vm_state building and task_state spawning. 
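To correlate the neutron external events above (network-vif-plugged / network-changed) across request IDs, a stdlib-only helper can pull them out of a flattened log of this shape. The record pattern below is derived from the lines shown here and is only a reading aid, not a Nova or oslo interface:

import re

# One record: "[ seconds] env[pid]: LEVEL logger [context] message", where the
# message runs until the next "[ seconds]" marker or the end of the input.
RECORD = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[\d+\]:\s+(?P<level>[A-Z]+)\s+(?P<logger>\S+)\s+"
    r"\[(?P<context>[^\]]*)\]\s+(?P<msg>.*?)(?=\[\s*\d+\.\d+\]|\Z)",
    re.DOTALL)

EVENT = re.compile(r"network-(?:vif-plugged|changed)-[0-9a-f-]{36}")


def external_events(flattened_log):
    """Yield (timestamp, level, event-name) for each neutron external event."""
    for rec in RECORD.finditer(flattened_log):
        for event in EVENT.findall(rec.group("msg")):
            yield float(rec.group("ts")), rec.group("level"), event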
[ 693.896389] env[69994]: DEBUG nova.compute.manager [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Received event network-changed-e90f5624-00b3-4a23-bedc-663e89b666df {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 693.896616] env[69994]: DEBUG nova.compute.manager [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Refreshing instance network info cache due to event network-changed-e90f5624-00b3-4a23-bedc-663e89b666df. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 693.896830] env[69994]: DEBUG oslo_concurrency.lockutils [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] Acquiring lock "refresh_cache-0bfe4393-5b2a-487f-ba7a-858ed4c861a5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.924893] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241473, 'name': CreateSnapshot_Task, 'duration_secs': 0.982962} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.925222] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 693.925979] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce47353e-e32a-4bca-827b-758c1de1b6db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.999779] env[69994]: DEBUG nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 694.047387] env[69994]: DEBUG nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 694.096356] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 694.096690] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.096763] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.096940] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.097102] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.097285] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 694.097567] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 694.097742] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 694.097912] env[69994]: DEBUG 
nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 694.098093] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 694.098270] env[69994]: DEBUG nova.virt.hardware [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 694.099214] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b660384c-f36b-445e-b6de-7763eb114f44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.107229] env[69994]: DEBUG nova.network.neutron [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.110525] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc86767d-5f35-4bd3-92fa-d40ae242625a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.268333] env[69994]: DEBUG nova.network.neutron [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Updating instance_info_cache with network_info: [{"id": "e90f5624-00b3-4a23-bedc-663e89b666df", "address": "fa:16:3e:66:ff:af", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape90f5624-00", "ovs_interfaceid": "e90f5624-00b3-4a23-bedc-663e89b666df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.269958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e 
tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.240s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.272653] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.030s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.272839] env[69994]: DEBUG nova.objects.instance [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 694.447011] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 694.447163] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-abcfa8a9-1dd8-4c06-8749-fd00f5b10cb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.456739] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 694.456739] env[69994]: value = "task-3241474" [ 694.456739] env[69994]: _type = "Task" [ 694.456739] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.465281] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241474, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.765667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.765984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.766232] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.766437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.766625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.769541] env[69994]: INFO nova.compute.manager [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Terminating instance [ 694.774245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Releasing lock "refresh_cache-0bfe4393-5b2a-487f-ba7a-858ed4c861a5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.774245] env[69994]: DEBUG nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Instance network_info: |[{"id": "e90f5624-00b3-4a23-bedc-663e89b666df", "address": "fa:16:3e:66:ff:af", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": 
[{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape90f5624-00", "ovs_interfaceid": "e90f5624-00b3-4a23-bedc-663e89b666df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 694.774546] env[69994]: DEBUG oslo_concurrency.lockutils [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] Acquired lock "refresh_cache-0bfe4393-5b2a-487f-ba7a-858ed4c861a5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.774546] env[69994]: DEBUG nova.network.neutron [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Refreshing network info cache for port e90f5624-00b3-4a23-bedc-663e89b666df {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.775374] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:ff:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e90f5624-00b3-4a23-bedc-663e89b666df', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 694.788128] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Creating folder: Project (43b24523ecd9423d84fa86385f970562). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.790656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.792948] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3c834ef-5a17-4526-af27-12379c6b606b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.810425] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Created folder: Project (43b24523ecd9423d84fa86385f970562) in parent group-v647729. [ 694.810425] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Creating folder: Instances. Parent ref: group-v647811. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.810425] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6280035b-3f2a-4732-a923-11f06e4c5c92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.821413] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Created folder: Instances in parent group-v647811. [ 694.821413] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.821413] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 694.821413] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-618f187a-99ba-42f9-bfa6-94cf23b6c58c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.840223] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.840223] env[69994]: value = "task-3241477" [ 694.840223] env[69994]: _type = "Task" [ 694.840223] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.848555] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241477, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.969195] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241474, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.028628] env[69994]: INFO nova.scheduler.client.report [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Deleted allocations for instance 2f710439-0216-401e-9759-af584f9bd00d [ 695.303370] env[69994]: DEBUG nova.compute.manager [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 695.303596] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.304767] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d915a2ae-36c9-42d4-8f1f-cce9c9b01424 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.318141] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.318141] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e86af09-2aa6-426b-8d6f-b116ad64f360 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.324296] env[69994]: DEBUG oslo_vmware.api [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 695.324296] env[69994]: value = "task-3241478" [ 695.324296] env[69994]: _type = "Task" [ 695.324296] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.331065] env[69994]: DEBUG oslo_vmware.api [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241478, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.350344] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241477, 'name': CreateVM_Task, 'duration_secs': 0.465921} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.350792] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 695.351940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.352267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.352706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 695.353082] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a473f65b-1f05-43e3-8e3e-d6566d20c849 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.361757] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 695.361757] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d09ecd-bb51-af5d-895c-583b878a74fc" [ 695.361757] env[69994]: _type = "Task" [ 695.361757] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.369668] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d09ecd-bb51-af5d-895c-583b878a74fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.470812] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241474, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.538063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-024ddc30-cada-45d8-8427-0c45b266bd2e tempest-FloatingIPsAssociationNegativeTestJSON-510142433 tempest-FloatingIPsAssociationNegativeTestJSON-510142433-project-member] Lock "2f710439-0216-401e-9759-af584f9bd00d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.946s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.800903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.801273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.805464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-678a85f0-5148-44bd-adeb-816d66273896 tempest-ServersAdmin275Test-1730721197 tempest-ServersAdmin275Test-1730721197-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.533s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.806465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.917s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.807864] env[69994]: INFO nova.compute.claims [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 695.816803] env[69994]: DEBUG nova.network.neutron [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Successfully updated port: 150df766-08da-4092-b1e4-2e9c7a7cbf76 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 695.833077] env[69994]: DEBUG oslo_vmware.api [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241478, 'name': PowerOffVM_Task, 'duration_secs': 0.187349} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.833373] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.833546] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.833790] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04500189-5cf6-4556-9672-fc5783db45b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.872252] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d09ecd-bb51-af5d-895c-583b878a74fc, 'name': SearchDatastore_Task, 'duration_secs': 0.011046} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.872627] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.872871] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 695.873214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.873336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.873520] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.874196] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a089dfc-bef3-4a1a-b819-bd3948f36af7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.882865] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.883132] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 695.883828] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faecaf26-4fcb-4aad-a473-28f70d6656ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.889315] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 695.889315] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e20de3-dce1-621f-9663-8ef2f6dd0bbf" [ 695.889315] env[69994]: _type = "Task" [ 695.889315] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.899073] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e20de3-dce1-621f-9663-8ef2f6dd0bbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.900312] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.900522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.900692] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Deleting the datastore file [datastore2] 6e8286a3-6fd1-44ee-a5ca-b21f3178334d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.900985] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8541d571-466d-4a5f-a4c9-394a5e179651 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.906865] env[69994]: DEBUG oslo_vmware.api [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for the task: (returnval){ [ 695.906865] env[69994]: value = "task-3241480" [ 695.906865] env[69994]: _type = "Task" [ 695.906865] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.914899] env[69994]: DEBUG oslo_vmware.api [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241480, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.946650] env[69994]: DEBUG nova.network.neutron [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Updated VIF entry in instance network info cache for port e90f5624-00b3-4a23-bedc-663e89b666df. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.947032] env[69994]: DEBUG nova.network.neutron [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Updating instance_info_cache with network_info: [{"id": "e90f5624-00b3-4a23-bedc-663e89b666df", "address": "fa:16:3e:66:ff:af", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape90f5624-00", "ovs_interfaceid": "e90f5624-00b3-4a23-bedc-663e89b666df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.970788] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241474, 'name': CloneVM_Task, 'duration_secs': 1.046928} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.971072] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Created linked-clone VM from snapshot [ 695.971788] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd988097-967c-41f6-9592-ea49cbdde64d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.979285] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Uploading image 9d2b9980-3b7b-48d8-90f1-76869ae8c772 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 696.033918] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 696.033918] env[69994]: value = "vm-647810" [ 696.033918] env[69994]: _type = "VirtualMachine" [ 696.033918] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 696.034573] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ac22906b-605c-4f6f-8d50-7b6fbd89d194 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.041220] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lease: (returnval){ [ 696.041220] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ff4c41-7d40-f3f5-c9d8-45091066e218" [ 696.041220] env[69994]: _type = "HttpNfcLease" [ 696.041220] env[69994]: } obtained for exporting VM: (result){ [ 696.041220] env[69994]: value = "vm-647810" [ 696.041220] env[69994]: _type = "VirtualMachine" [ 696.041220] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 696.041608] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the lease: (returnval){ [ 696.041608] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ff4c41-7d40-f3f5-c9d8-45091066e218" [ 696.041608] env[69994]: _type = "HttpNfcLease" [ 696.041608] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 696.047712] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 696.047712] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ff4c41-7d40-f3f5-c9d8-45091066e218" [ 696.047712] env[69994]: _type = "HttpNfcLease" [ 696.047712] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 696.085162] env[69994]: DEBUG nova.compute.manager [req-00538f3e-af41-4dea-ad88-7d3a8ca5a40f req-408f11b0-76a1-4a5a-a1f5-24a32227e711 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Received event network-vif-plugged-150df766-08da-4092-b1e4-2e9c7a7cbf76 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 696.085162] env[69994]: DEBUG oslo_concurrency.lockutils [req-00538f3e-af41-4dea-ad88-7d3a8ca5a40f req-408f11b0-76a1-4a5a-a1f5-24a32227e711 service nova] Acquiring lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.085162] env[69994]: DEBUG oslo_concurrency.lockutils [req-00538f3e-af41-4dea-ad88-7d3a8ca5a40f req-408f11b0-76a1-4a5a-a1f5-24a32227e711 service nova] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.085162] env[69994]: DEBUG oslo_concurrency.lockutils [req-00538f3e-af41-4dea-ad88-7d3a8ca5a40f req-408f11b0-76a1-4a5a-a1f5-24a32227e711 service nova] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.085162] env[69994]: DEBUG nova.compute.manager [req-00538f3e-af41-4dea-ad88-7d3a8ca5a40f req-408f11b0-76a1-4a5a-a1f5-24a32227e711 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] No waiting events found dispatching network-vif-plugged-150df766-08da-4092-b1e4-2e9c7a7cbf76 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 696.085323] env[69994]: WARNING nova.compute.manager [req-00538f3e-af41-4dea-ad88-7d3a8ca5a40f req-408f11b0-76a1-4a5a-a1f5-24a32227e711 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Received unexpected event network-vif-plugged-150df766-08da-4092-b1e4-2e9c7a7cbf76 for instance with vm_state building and task_state spawning. 
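The CloneVM_Task and HttpNfcLease entries above (together with the "is ready" / "Found VMDK URL" entries that follow) trace the image-upload path for instance aeb7928a: clone the VM, ask vCenter for an export lease, wait for the lease to become ready, then stream the exported VMDK out over HTTPS. A minimal sketch of that lease handshake, assuming an already-created oslo_vmware VMwareAPISession; the helper name open_exported_vmdk and the local variable names are illustrative, not Nova's:

    # Sketch only: assumes an oslo_vmware VMwareAPISession ("session") and a
    # VirtualMachine managed-object reference ("vm_ref").
    from oslo_vmware import vim_util


    def open_exported_vmdk(session, vm_ref):
        # VirtualMachine.ExportVm: ask vCenter for an HttpNfcLease covering the
        # VM's disks (the "Creating HttpNfcLease lease for exporting VM" entry).
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

        # Block until the lease leaves "initializing" and becomes "ready",
        # mirroring the wait_for_lease_ready / _poll_lease entries in the log.
        session.wait_for_lease_ready(lease)

        # Read the lease info and pick the device URL of the exported disk;
        # the driver then opens that HTTPS URL to read the VMDK stream.
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        vmdk_url = lease_info.deviceUrl[0].url
        return lease, vmdk_url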
[ 696.323956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "refresh_cache-e4013007-fd79-4d70-a9d1-70a4c621c0ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.323956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired lock "refresh_cache-e4013007-fd79-4d70-a9d1-70a4c621c0ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.323956] env[69994]: DEBUG nova.network.neutron [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.401148] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e20de3-dce1-621f-9663-8ef2f6dd0bbf, 'name': SearchDatastore_Task, 'duration_secs': 0.015134} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.405966] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbc3aac0-7689-4a85-bacf-c987dd5626e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.407805] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 696.407805] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522b07cd-864f-5c1d-7085-b1b97182d1ed" [ 696.407805] env[69994]: _type = "Task" [ 696.407805] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.419558] env[69994]: DEBUG oslo_vmware.api [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Task: {'id': task-3241480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129111} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.422723] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 696.422925] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 696.423118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 696.423295] env[69994]: INFO nova.compute.manager [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 696.423536] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 696.423736] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b07cd-864f-5c1d-7085-b1b97182d1ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.424052] env[69994]: DEBUG nova.compute.manager [-] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 696.424148] env[69994]: DEBUG nova.network.neutron [-] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.450680] env[69994]: DEBUG oslo_concurrency.lockutils [req-4304ef02-8b1c-4091-8fe6-40267edbed98 req-5e0c1aae-ec9e-4cbb-ba69-39cc71b0e5a7 service nova] Releasing lock "refresh_cache-0bfe4393-5b2a-487f-ba7a-858ed4c861a5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.557121] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 696.557121] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ff4c41-7d40-f3f5-c9d8-45091066e218" [ 696.557121] env[69994]: _type = "HttpNfcLease" [ 696.557121] env[69994]: } is ready. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 696.557432] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 696.557432] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ff4c41-7d40-f3f5-c9d8-45091066e218" [ 696.557432] env[69994]: _type = "HttpNfcLease" [ 696.557432] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 696.562458] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f15510-4cea-4681-8621-91e593538511 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.566236] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804a57-f9a3-60f3-426f-b8d52fda1a41/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 696.566462] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804a57-f9a3-60f3-426f-b8d52fda1a41/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 696.657918] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5485794c-052e-49db-8f94-f3f4493fab36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.894666] env[69994]: DEBUG nova.network.neutron [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.928813] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b07cd-864f-5c1d-7085-b1b97182d1ed, 'name': SearchDatastore_Task, 'duration_secs': 0.013754} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.929098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.929364] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 0bfe4393-5b2a-487f-ba7a-858ed4c861a5/0bfe4393-5b2a-487f-ba7a-858ed4c861a5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 696.929631] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7da37971-8c21-4a8a-bfaf-4474bd951640 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.936875] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 696.936875] env[69994]: value = "task-3241482" [ 696.936875] env[69994]: _type = "Task" [ 696.936875] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.949661] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241482, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.180399] env[69994]: DEBUG nova.network.neutron [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Updating instance_info_cache with network_info: [{"id": "150df766-08da-4092-b1e4-2e9c7a7cbf76", "address": "fa:16:3e:e4:f8:7b", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap150df766-08", "ovs_interfaceid": "150df766-08da-4092-b1e4-2e9c7a7cbf76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.421709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.422081] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.451934] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241482, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.562915] env[69994]: DEBUG nova.network.neutron [-] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.631735] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6cf3d1-f0ce-4cb3-860a-e6c393677246 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.641214] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe62cb5-8175-433c-9b70-025d4544338d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.689061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Releasing lock "refresh_cache-e4013007-fd79-4d70-a9d1-70a4c621c0ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.689424] env[69994]: DEBUG nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Instance network_info: |[{"id": "150df766-08da-4092-b1e4-2e9c7a7cbf76", "address": "fa:16:3e:e4:f8:7b", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap150df766-08", "ovs_interfaceid": "150df766-08da-4092-b1e4-2e9c7a7cbf76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 697.690125] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:f8:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '150df766-08da-4092-b1e4-2e9c7a7cbf76', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 697.700442] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 
tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Creating folder: Project (9d5dcd4c69bb4d6db17dcfa3a7ace979). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.701245] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e863bfb-fc86-482a-98fd-1cb44e8e522d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.704109] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bbea619-f688-4965-9cea-537125ba6ad1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.712244] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0b9e4d-e492-4c1e-9dba-9e075674f8aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.717396] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Created folder: Project (9d5dcd4c69bb4d6db17dcfa3a7ace979) in parent group-v647729. [ 697.717714] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Creating folder: Instances. Parent ref: group-v647814. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 697.718863] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0bd7a635-86ae-4bb7-8c6e-42e23e5c9b73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.729238] env[69994]: DEBUG nova.compute.provider_tree [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.740101] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Created folder: Instances in parent group-v647814. [ 697.740375] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.740743] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 697.741218] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64c8c15a-06d1-475b-9eb8-cd75fc2850d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.763035] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 697.763035] env[69994]: value = "task-3241485" [ 697.763035] env[69994]: _type = "Task" [ 697.763035] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.771603] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241485, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.950141] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241482, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611749} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.950141] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 0bfe4393-5b2a-487f-ba7a-858ed4c861a5/0bfe4393-5b2a-487f-ba7a-858ed4c861a5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.950141] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.950141] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fda1b2c6-4803-4c3a-9749-cb88aa308e4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.956275] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 697.956275] env[69994]: value = "task-3241486" [ 697.956275] env[69994]: _type = "Task" [ 697.956275] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.964704] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241486, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.063851] env[69994]: INFO nova.compute.manager [-] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Took 1.64 seconds to deallocate network for instance. [ 698.233257] env[69994]: DEBUG nova.scheduler.client.report [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.280126] env[69994]: DEBUG nova.compute.manager [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Received event network-changed-150df766-08da-4092-b1e4-2e9c7a7cbf76 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 698.280432] env[69994]: DEBUG nova.compute.manager [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Refreshing instance network info cache due to event network-changed-150df766-08da-4092-b1e4-2e9c7a7cbf76. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 698.280739] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] Acquiring lock "refresh_cache-e4013007-fd79-4d70-a9d1-70a4c621c0ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.280895] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] Acquired lock "refresh_cache-e4013007-fd79-4d70-a9d1-70a4c621c0ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.285076] env[69994]: DEBUG nova.network.neutron [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Refreshing network info cache for port 150df766-08da-4092-b1e4-2e9c7a7cbf76 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 698.286400] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241485, 'name': CreateVM_Task, 'duration_secs': 0.350022} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.286607] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 698.287336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.287563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.288013] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 698.288536] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3ecdb3a-7bab-49e6-9e7f-7f441819a417 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.305436] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 698.305436] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5246b538-f28b-8081-93de-b306d6c44809" [ 698.305436] env[69994]: _type = "Task" [ 698.305436] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.314918] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5246b538-f28b-8081-93de-b306d6c44809, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.466110] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139614} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.468841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 698.469801] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b0e754-5980-4744-8659-cc2c714c0523 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.502400] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 0bfe4393-5b2a-487f-ba7a-858ed4c861a5/0bfe4393-5b2a-487f-ba7a-858ed4c861a5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 698.503180] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-317b4c98-ad13-4f35-b534-8aca68a55c89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.526028] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 698.526028] env[69994]: value = "task-3241487" [ 698.526028] env[69994]: _type = "Task" [ 698.526028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.534307] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241487, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.572092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.739072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.932s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.739700] env[69994]: DEBUG nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 698.746022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.197s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.746022] env[69994]: DEBUG nova.objects.instance [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lazy-loading 'resources' on Instance uuid 21f66039-6292-4d9c-b97d-668d029def24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 698.768792] env[69994]: DEBUG nova.network.neutron [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Updated VIF entry in instance network info cache for port 150df766-08da-4092-b1e4-2e9c7a7cbf76. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 698.769223] env[69994]: DEBUG nova.network.neutron [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Updating instance_info_cache with network_info: [{"id": "150df766-08da-4092-b1e4-2e9c7a7cbf76", "address": "fa:16:3e:e4:f8:7b", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap150df766-08", "ovs_interfaceid": "150df766-08da-4092-b1e4-2e9c7a7cbf76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.817439] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5246b538-f28b-8081-93de-b306d6c44809, 'name': SearchDatastore_Task, 'duration_secs': 0.009831} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.818140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.818681] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 698.818931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.819098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.819279] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 698.820093] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7a70a37-5184-4557-af90-115c015a7dcb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.828554] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 698.828939] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 698.829767] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad066ce0-6f39-4b94-a06f-9fd343bb54f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.835307] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 698.835307] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521fba04-e63d-16f6-4be1-40eb67a30755" [ 698.835307] env[69994]: _type = "Task" [ 698.835307] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.844158] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521fba04-e63d-16f6-4be1-40eb67a30755, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.036799] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241487, 'name': ReconfigVM_Task, 'duration_secs': 0.499926} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.037228] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 0bfe4393-5b2a-487f-ba7a-858ed4c861a5/0bfe4393-5b2a-487f-ba7a-858ed4c861a5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 699.041393] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60654dca-8f8c-4e06-b4cd-618429ec71ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.043819] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 699.043819] env[69994]: value = "task-3241488" [ 699.043819] env[69994]: _type = "Task" [ 699.043819] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.051954] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241488, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.246758] env[69994]: DEBUG nova.compute.utils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 699.251908] env[69994]: DEBUG nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 699.252115] env[69994]: DEBUG nova.network.neutron [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.273094] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] Releasing lock "refresh_cache-e4013007-fd79-4d70-a9d1-70a4c621c0ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.273339] env[69994]: DEBUG nova.compute.manager [req-b1e6500a-bd2c-4be6-bddc-af4ec42257be req-0b06048f-f8ae-4609-8196-535d1be0f847 service nova] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Received event network-vif-deleted-e91dc2f6-726a-485a-9736-6de1fad4e0ae {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 699.345706] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521fba04-e63d-16f6-4be1-40eb67a30755, 'name': SearchDatastore_Task, 'duration_secs': 0.021019} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.348945] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47bb45e9-3dd7-4376-8dcc-6b6a6e2e8284 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.354233] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 699.354233] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525d820c-500e-639b-92f5-fa326784574f" [ 699.354233] env[69994]: _type = "Task" [ 699.354233] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.363810] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525d820c-500e-639b-92f5-fa326784574f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.411819] env[69994]: DEBUG nova.policy [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9cc52f7cc9e545e68464a382a2d5198e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d5dcd4c69bb4d6db17dcfa3a7ace979', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 699.553555] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241488, 'name': Rename_Task, 'duration_secs': 0.181888} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.556435] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 699.557049] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1da27d9f-5114-4609-8da0-b0920c0b593a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.563240] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 699.563240] env[69994]: value = "task-3241489" [ 699.563240] env[69994]: _type = "Task" [ 699.563240] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.571281] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241489, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.752661] env[69994]: DEBUG nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 699.869699] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525d820c-500e-639b-92f5-fa326784574f, 'name': SearchDatastore_Task, 'duration_secs': 0.011112} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.870014] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.870643] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] e4013007-fd79-4d70-a9d1-70a4c621c0ea/e4013007-fd79-4d70-a9d1-70a4c621c0ea.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 699.870643] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac76939b-5b81-477f-9b69-92dc96a4d6e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.875197] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f31961-999f-4eba-8c31-8363d1822bf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.879618] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 699.879618] env[69994]: value = "task-3241490" [ 699.879618] env[69994]: _type = "Task" [ 699.879618] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.886630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c18f749-6f3f-4fa3-a56a-e125e3bbce14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.894289] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241490, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.922991] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725b4342-cea3-48e1-8311-ef9f7577cc43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.931827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59195d7e-18fb-45a1-8e15-a1194ec65355 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.946904] env[69994]: DEBUG nova.compute.provider_tree [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.008044] env[69994]: DEBUG nova.network.neutron [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Successfully created port: 825d8824-77bf-4808-8066-32caf413dbc7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.078743] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241489, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.390920] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241490, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.450086] env[69994]: DEBUG nova.scheduler.client.report [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.583018] env[69994]: DEBUG oslo_vmware.api [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241489, 'name': PowerOnVM_Task, 'duration_secs': 0.579622} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.583018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 700.583018] env[69994]: INFO nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Took 9.37 seconds to spawn the instance on the hypervisor. [ 700.583018] env[69994]: DEBUG nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 700.583018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf1cd7f-70ce-4bc7-99de-831e058851a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.766470] env[69994]: DEBUG nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 700.822084] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 700.822349] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.822507] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 700.822685] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c 
tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.822931] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 700.825249] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 700.825499] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 700.825665] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 700.825831] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 700.825993] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 700.826179] env[69994]: DEBUG nova.virt.hardware [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 700.827370] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a40e23-3a3e-4835-9ecb-6e9ab5744b61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.835907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9e2223-ab6c-4b9d-9160-b53c4f094e43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.890611] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241490, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56263} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.890909] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] e4013007-fd79-4d70-a9d1-70a4c621c0ea/e4013007-fd79-4d70-a9d1-70a4c621c0ea.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 700.891130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 700.891374] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74c8a3bc-8045-4ebf-8597-bfebe56f81e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.897834] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 700.897834] env[69994]: value = "task-3241491" [ 700.897834] env[69994]: _type = "Task" [ 700.897834] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.905299] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241491, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.955973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.213s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.958737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 28.171s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.022337] env[69994]: INFO nova.scheduler.client.report [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Deleted allocations for instance 21f66039-6292-4d9c-b97d-668d029def24 [ 701.100754] env[69994]: INFO nova.compute.manager [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Took 40.27 seconds to build instance. 
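The run of entries above traces a complete spawn of instance 0bfe4393-5b2a-487f-ba7a-858ed4c861a5: the cached image VMDK is copied into the instance directory (CopyVirtualDisk_Task), the root disk is extended (ExtendVirtualDisk_Task), the disk is attached with ReconfigVM_Task, the VM is renamed and powered on (PowerOnVM_Task), after which the compute manager reports the 40.27 second build time. Each of those steps follows the same oslo.vmware call-and-poll pattern: invoke the vSphere method, receive a Task reference, then poll it until vCenter reports completion (the repeated "progress is N%" and "completed successfully" lines). The sketch below is illustrative only, not Nova's code; it assumes an already established oslo_vmware.api.VMwareAPISession (session) and a resolved VirtualMachine managed-object reference (vm_ref), and the function name power_on_vm is a placeholder.

def power_on_vm(session, vm_ref):
    # Issue the SOAP call that the log records as
    # "Invoking VirtualMachine.PowerOnVM_Task"; vCenter returns a Task reference.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Poll the task until it finishes (the "progress is N%" lines above),
    # raising if vCenter reports the task failed.
    return session.wait_for_task(task)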
[ 701.408675] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241491, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06569} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.408839] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 701.409636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bd657d-c5b5-4152-8458-4eaed4e1a92e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.433012] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] e4013007-fd79-4d70-a9d1-70a4c621c0ea/e4013007-fd79-4d70-a9d1-70a4c621c0ea.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 701.433323] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54033fe2-3292-4953-80e6-3b66fe7a275c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.452943] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 701.452943] env[69994]: value = "task-3241492" [ 701.452943] env[69994]: _type = "Task" [ 701.452943] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.460783] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241492, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.531487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-476fd883-e2c1-473b-92e5-8ae3b18e70b3 tempest-ServersAdmin275Test-841048262 tempest-ServersAdmin275Test-841048262-project-member] Lock "21f66039-6292-4d9c-b97d-668d029def24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.256s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.603741] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08944367-8c84-4664-8b90-9e4d0e6dee57 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.223s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.968107] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241492, 'name': ReconfigVM_Task, 'duration_secs': 0.334278} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.971212] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Reconfigured VM instance instance-0000001c to attach disk [datastore2] e4013007-fd79-4d70-a9d1-70a4c621c0ea/e4013007-fd79-4d70-a9d1-70a4c621c0ea.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 701.972192] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5f661c3-1a08-49f5-baef-83a2764557a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.979274] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 701.979274] env[69994]: value = "task-3241493" [ 701.979274] env[69994]: _type = "Task" [ 701.979274] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.993341] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241493, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.088658] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f083f0a-74b2-4bcd-bb90-e2b1f56c3ed6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.097031] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e4a1d1-4dd5-4ccf-93c0-552a85deacc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.108124] env[69994]: DEBUG nova.network.neutron [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Successfully updated port: 825d8824-77bf-4808-8066-32caf413dbc7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 702.110642] env[69994]: DEBUG nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 702.159962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cbd253-de11-4fa6-a8a7-5861a16613c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.170120] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e2ed58-45e6-43be-ac96-361a96adb5ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.188452] env[69994]: DEBUG nova.compute.provider_tree [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.491755] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241493, 'name': Rename_Task, 'duration_secs': 0.141539} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.492078] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 702.494110] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a73d9d0-5cf5-4dbb-809c-99dc7ac36243 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.502187] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 702.502187] env[69994]: value = "task-3241494" [ 702.502187] env[69994]: _type = "Task" [ 702.502187] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.513863] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.523614] env[69994]: DEBUG nova.compute.manager [req-d220b2aa-0077-425a-aaca-3182f7ba550d req-18384e8c-3de5-40cd-9ad1-af77cdcbffde service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Received event network-vif-plugged-825d8824-77bf-4808-8066-32caf413dbc7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 702.523849] env[69994]: DEBUG oslo_concurrency.lockutils [req-d220b2aa-0077-425a-aaca-3182f7ba550d req-18384e8c-3de5-40cd-9ad1-af77cdcbffde service nova] Acquiring lock "153f0ead-6e2f-4077-b86a-00d3a1114fed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.524074] env[69994]: DEBUG oslo_concurrency.lockutils [req-d220b2aa-0077-425a-aaca-3182f7ba550d req-18384e8c-3de5-40cd-9ad1-af77cdcbffde service nova] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.524290] env[69994]: DEBUG oslo_concurrency.lockutils [req-d220b2aa-0077-425a-aaca-3182f7ba550d req-18384e8c-3de5-40cd-9ad1-af77cdcbffde service nova] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.524496] env[69994]: DEBUG nova.compute.manager [req-d220b2aa-0077-425a-aaca-3182f7ba550d req-18384e8c-3de5-40cd-9ad1-af77cdcbffde service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] No waiting events found dispatching network-vif-plugged-825d8824-77bf-4808-8066-32caf413dbc7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 702.524712] env[69994]: WARNING 
nova.compute.manager [req-d220b2aa-0077-425a-aaca-3182f7ba550d req-18384e8c-3de5-40cd-9ad1-af77cdcbffde service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Received unexpected event network-vif-plugged-825d8824-77bf-4808-8066-32caf413dbc7 for instance with vm_state building and task_state spawning. [ 702.617617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "refresh_cache-153f0ead-6e2f-4077-b86a-00d3a1114fed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.617787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired lock "refresh_cache-153f0ead-6e2f-4077-b86a-00d3a1114fed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.617942] env[69994]: DEBUG nova.network.neutron [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 702.693011] env[69994]: DEBUG nova.scheduler.client.report [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.697354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.013513] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241494, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.304169] env[69994]: DEBUG nova.network.neutron [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.515489] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241494, 'name': PowerOnVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.540958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.541250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.541462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.541639] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.541816] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.544025] env[69994]: INFO nova.compute.manager [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Terminating instance [ 703.574455] env[69994]: DEBUG nova.network.neutron [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Updating instance_info_cache with network_info: [{"id": "825d8824-77bf-4808-8066-32caf413dbc7", "address": "fa:16:3e:4a:d1:f4", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": 
{"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825d8824-77", "ovs_interfaceid": "825d8824-77bf-4808-8066-32caf413dbc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.709481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.750s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.713948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.146s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.714250] env[69994]: DEBUG nova.objects.instance [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lazy-loading 'resources' on Instance uuid 5badecfd-5784-4968-8519-419a01c67465 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 704.019934] env[69994]: DEBUG oslo_vmware.api [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241494, 'name': PowerOnVM_Task, 'duration_secs': 1.070145} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.020254] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 704.020497] env[69994]: INFO nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Took 9.97 seconds to spawn the instance on the hypervisor. 
[ 704.020683] env[69994]: DEBUG nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 704.023899] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d111ab-32de-4ca0-84bd-a9438261baa3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.052115] env[69994]: DEBUG nova.compute.manager [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 704.052115] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.053360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84b9274-7b76-4cd6-897f-2d9ee40c42cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.062619] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 704.062619] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3831c32-872a-44f2-a0d7-9e908ee8df24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.069490] env[69994]: DEBUG oslo_vmware.api [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 704.069490] env[69994]: value = "task-3241495" [ 704.069490] env[69994]: _type = "Task" [ 704.069490] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.079989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Releasing lock "refresh_cache-153f0ead-6e2f-4077-b86a-00d3a1114fed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.080369] env[69994]: DEBUG nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Instance network_info: |[{"id": "825d8824-77bf-4808-8066-32caf413dbc7", "address": "fa:16:3e:4a:d1:f4", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825d8824-77", "ovs_interfaceid": "825d8824-77bf-4808-8066-32caf413dbc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 704.080770] env[69994]: DEBUG oslo_vmware.api [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.081094] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:d1:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '825d8824-77bf-4808-8066-32caf413dbc7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.089697] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.089930] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 704.090170] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09469dbc-a727-4e3b-b549-b5a285bd406c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.116404] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 704.116404] env[69994]: value = "task-3241496" [ 704.116404] env[69994]: _type = "Task" [ 704.116404] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.127370] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241496, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.373141] env[69994]: INFO nova.scheduler.client.report [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Deleted allocation for migration d1d290b1-0dbc-4360-9317-4b05f33c89a3 [ 704.550246] env[69994]: INFO nova.compute.manager [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Took 43.05 seconds to build instance. [ 704.584807] env[69994]: DEBUG oslo_vmware.api [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241495, 'name': PowerOffVM_Task, 'duration_secs': 0.244356} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.587678] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 704.587874] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 704.588338] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09920453-02ea-41d1-a01a-b09a5ef39e38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.632370] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241496, 'name': CreateVM_Task, 'duration_secs': 0.412197} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.635594] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 704.638041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.638041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.638041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 704.638209] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-256a7345-8041-41e0-8be0-84e6a3e18978 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.643388] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 704.643388] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52547789-e8c0-4f7c-a1e5-0d5ec8de9156" [ 704.643388] env[69994]: _type = "Task" [ 704.643388] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.653456] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52547789-e8c0-4f7c-a1e5-0d5ec8de9156, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.669476] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 704.669813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 704.670046] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Deleting the datastore file [datastore2] 0bfe4393-5b2a-487f-ba7a-858ed4c861a5 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.673287] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e9ad3ad-4c63-4efc-b894-9688db5eb196 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.679469] env[69994]: DEBUG oslo_vmware.api [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for the task: (returnval){ [ 704.679469] env[69994]: value = "task-3241498" [ 704.679469] env[69994]: _type = "Task" [ 704.679469] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.689845] env[69994]: DEBUG oslo_vmware.api [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241498, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.883513] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2c7eea68-37c2-43c3-823d-8545f11832ea tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 34.775s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.914398] env[69994]: DEBUG nova.compute.manager [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Received event network-changed-825d8824-77bf-4808-8066-32caf413dbc7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 704.914398] env[69994]: DEBUG nova.compute.manager [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Refreshing instance network info cache due to event network-changed-825d8824-77bf-4808-8066-32caf413dbc7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 704.914398] env[69994]: DEBUG oslo_concurrency.lockutils [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] Acquiring lock "refresh_cache-153f0ead-6e2f-4077-b86a-00d3a1114fed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.914398] env[69994]: DEBUG oslo_concurrency.lockutils [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] Acquired lock "refresh_cache-153f0ead-6e2f-4077-b86a-00d3a1114fed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.914398] env[69994]: DEBUG nova.network.neutron [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Refreshing network info cache for port 825d8824-77bf-4808-8066-32caf413dbc7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.993372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7286af5c-06a2-4d73-8dc3-4d032c4d55be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.001297] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743abb30-b6b1-4096-ba7e-19610022e38d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.043317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8caad3-83fc-42d2-a205-c3d3b91f0cb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.051902] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab65671-80ac-498a-b5d1-e66df2484c27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.055171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60abb943-f0f6-47ea-a081-321e865a5810 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.444s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.067889] env[69994]: DEBUG nova.compute.provider_tree [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.156952] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52547789-e8c0-4f7c-a1e5-0d5ec8de9156, 'name': SearchDatastore_Task, 'duration_secs': 0.0125} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.157265] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.157492] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 705.157764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.157906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.158089] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.158351] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-047e7ea7-254a-4d18-a48f-8db0ffec084b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.171318] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.171506] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 705.172316] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a710ac33-0d1a-4a2e-a32c-97ec0cfc77bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.185250] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 705.185250] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf9cf9-cd3c-b3eb-079f-08a1033d8fb3" [ 705.185250] env[69994]: _type = "Task" [ 705.185250] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.189284] env[69994]: DEBUG oslo_vmware.api [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Task: {'id': task-3241498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400194} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.193556] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 705.193747] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 705.193928] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 705.194115] env[69994]: INFO nova.compute.manager [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 705.194343] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.194529] env[69994]: DEBUG nova.compute.manager [-] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 705.194621] env[69994]: DEBUG nova.network.neutron [-] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 705.201534] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cf9cf9-cd3c-b3eb-079f-08a1033d8fb3, 'name': SearchDatastore_Task, 'duration_secs': 0.009134} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.202284] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5500721-e4c4-4ff0-8ad9-810fe47e66a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.207564] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 705.207564] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5289b207-687b-4899-45c0-d10e9338f2d9" [ 705.207564] env[69994]: _type = "Task" [ 705.207564] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.215000] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5289b207-687b-4899-45c0-d10e9338f2d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.558623] env[69994]: DEBUG nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 705.573604] env[69994]: DEBUG nova.scheduler.client.report [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.724155] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5289b207-687b-4899-45c0-d10e9338f2d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010721} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.724514] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.724820] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 153f0ead-6e2f-4077-b86a-00d3a1114fed/153f0ead-6e2f-4077-b86a-00d3a1114fed.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 705.725165] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7885b34a-222f-44c4-b285-968f606fe9f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.733730] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 705.733730] env[69994]: value = "task-3241499" [ 705.733730] env[69994]: _type = "Task" [ 705.733730] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.743280] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241499, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.981967] env[69994]: DEBUG nova.network.neutron [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Updated VIF entry in instance network info cache for port 825d8824-77bf-4808-8066-32caf413dbc7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 705.981967] env[69994]: DEBUG nova.network.neutron [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Updating instance_info_cache with network_info: [{"id": "825d8824-77bf-4808-8066-32caf413dbc7", "address": "fa:16:3e:4a:d1:f4", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825d8824-77", "ovs_interfaceid": "825d8824-77bf-4808-8066-32caf413dbc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.081237] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.365s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.083912] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.446s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.084351] env[69994]: DEBUG nova.objects.instance [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lazy-loading 'resources' on Instance uuid 91bb882c-7b84-450f-bd03-91ea1ce739ce {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 706.107179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.141219] env[69994]: INFO nova.scheduler.client.report [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Deleted allocations for instance 5badecfd-5784-4968-8519-419a01c67465 [ 706.245028] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452828} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.246041] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 153f0ead-6e2f-4077-b86a-00d3a1114fed/153f0ead-6e2f-4077-b86a-00d3a1114fed.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 706.246041] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 706.246041] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90e41614-f6f6-4ff3-b5b6-1173e056c93c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.253574] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 706.253574] env[69994]: value = "task-3241500" [ 706.253574] env[69994]: _type = "Task" [ 706.253574] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.263522] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241500, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.364968] env[69994]: DEBUG nova.network.neutron [-] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.485234] env[69994]: DEBUG oslo_concurrency.lockutils [req-df0e218a-5da0-4903-850a-88f39e20bdbf req-40c354ad-b07e-4831-bd8d-3bf1a75b005f service nova] Releasing lock "refresh_cache-153f0ead-6e2f-4077-b86a-00d3a1114fed" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.656653] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cf392351-b8a4-4f3f-bdd4-3218dba33656 tempest-InstanceActionsTestJSON-29894176 tempest-InstanceActionsTestJSON-29894176-project-member] Lock "5badecfd-5784-4968-8519-419a01c67465" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.680s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.772212] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088019} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.775315] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 706.776396] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d67e0c0-5276-45f1-8998-ebd842ff2ddc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.802744] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 153f0ead-6e2f-4077-b86a-00d3a1114fed/153f0ead-6e2f-4077-b86a-00d3a1114fed.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 706.805798] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e73d6b4-779b-47b4-8cab-3995e4eb06f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.826740] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 706.826740] env[69994]: value = "task-3241501" [ 706.826740] env[69994]: _type = "Task" [ 706.826740] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.837408] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.868090] env[69994]: INFO nova.compute.manager [-] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Took 1.67 seconds to deallocate network for instance. [ 707.181957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.182291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.222730] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3275c0b4-29cc-4172-8b9d-98e7b48a9620 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.236054] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05241823-d28c-4778-8bef-14079b0cab93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.268745] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d912353f-dd24-4953-bbc5-3d02870e9023 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.277502] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8b23dd-e11e-4c8b-ac1f-ffdd2d6a1986 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.290189] env[69994]: DEBUG nova.compute.provider_tree [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.340173] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 
tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241501, 'name': ReconfigVM_Task, 'duration_secs': 0.359036} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.340173] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 153f0ead-6e2f-4077-b86a-00d3a1114fed/153f0ead-6e2f-4077-b86a-00d3a1114fed.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.340747] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c880640-aaca-4ef6-a8b0-68b151e9a23f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.347761] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 707.347761] env[69994]: value = "task-3241502" [ 707.347761] env[69994]: _type = "Task" [ 707.347761] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.357041] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241502, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.381058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.573757] env[69994]: DEBUG nova.compute.manager [req-3ba033fd-6059-4b23-ad98-0f503ef1232d req-895c3207-25db-4ca7-b318-ebaf0a3d126c service nova] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Received event network-vif-deleted-e90f5624-00b3-4a23-bedc-663e89b666df {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 707.825875] env[69994]: ERROR nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [req-cb941390-4feb-451e-8468-8f1472fe904a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cb941390-4feb-451e-8468-8f1472fe904a"}]} [ 707.847238] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 707.866653] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241502, 'name': Rename_Task, 'duration_secs': 0.260767} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.866938] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 707.868314] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df3c8a6d-afbe-4c31-87b8-d11b243b0cf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.872316] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 707.872600] env[69994]: DEBUG nova.compute.provider_tree [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.886784] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 707.886784] env[69994]: value = "task-3241503" [ 707.886784] env[69994]: _type 
= "Task" [ 707.886784] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.895297] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241503, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.896984] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 707.915680] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 708.136566] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.137360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.400509] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241503, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.579260] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12454c2-4dd5-46d9-bb1c-4a66e2f18d33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.593124] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724065d3-3f47-475a-9788-d707ea292aac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.628849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2287f0-ee25-4a12-b229-4284988c66a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.637118] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d65f7a-78a5-44cd-a466-59606a89b6e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.652265] env[69994]: DEBUG nova.compute.provider_tree [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 708.900970] env[69994]: DEBUG oslo_vmware.api [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241503, 'name': PowerOnVM_Task, 'duration_secs': 0.781451} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.902374] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 708.902700] env[69994]: INFO nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Took 8.14 seconds to spawn the instance on the hypervisor. 
[ 708.906044] env[69994]: DEBUG nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 708.906917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cf62ba-2af1-4bdd-80d9-9f0dc8884db8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.111814] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.112199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.181461] env[69994]: ERROR nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [req-ba736156-1dbd-4a99-b2c2-4c8f554099cf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ba736156-1dbd-4a99-b2c2-4c8f554099cf"}]} [ 709.203650] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 709.227897] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 709.228409] env[69994]: DEBUG nova.compute.provider_tree [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 709.251017] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 709.274398] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 709.433881] env[69994]: INFO nova.compute.manager [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Took 41.57 seconds to build instance. 
[ 709.614610] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804a57-f9a3-60f3-426f-b8d52fda1a41/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 709.616032] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe429d0-4394-4486-b600-989abfca29c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.625022] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804a57-f9a3-60f3-426f-b8d52fda1a41/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 709.625491] env[69994]: ERROR oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804a57-f9a3-60f3-426f-b8d52fda1a41/disk-0.vmdk due to incomplete transfer. [ 709.625694] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-688b636c-15e3-4045-8ecc-8829bef19c93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.643758] env[69994]: DEBUG oslo_vmware.rw_handles [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52804a57-f9a3-60f3-426f-b8d52fda1a41/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 709.643758] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Uploaded image 9d2b9980-3b7b-48d8-90f1-76869ae8c772 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 709.645643] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 709.645827] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-625996eb-5477-4df1-b762-12760d1d697f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.652783] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 709.652783] env[69994]: value = "task-3241504" [ 709.652783] env[69994]: _type = "Task" [ 709.652783] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.668967] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241504, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.937145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-648f5373-1eaa-4b07-be9f-fd8ef43bdc1c tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.530s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.084429] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9661040a-011d-43a9-9368-6c9c36ec11ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.093077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ce132e-de02-4662-86fe-7c293a10da3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.130173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef207d54-7959-459b-8f40-8a8470b7524a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.138527] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce72e8c-f024-4e6c-897e-746bbb45c051 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.153483] env[69994]: DEBUG nova.compute.provider_tree [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 710.162965] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241504, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.443227] env[69994]: DEBUG nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 710.668247] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241504, 'name': Destroy_Task, 'duration_secs': 0.799809} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.668530] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Destroyed the VM [ 710.668765] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 710.669023] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-50dc758f-5f4f-4bd2-a406-f58a0b01fe6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.675524] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 710.675524] env[69994]: value = "task-3241505" [ 710.675524] env[69994]: _type = "Task" [ 710.675524] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.682984] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241505, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.702937] env[69994]: DEBUG nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 59 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 710.702937] env[69994]: DEBUG nova.compute.provider_tree [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 59 to 60 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 710.702937] env[69994]: DEBUG nova.compute.provider_tree [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 710.965054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.184791] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241505, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.206037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.122s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.208745] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.266s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.210351] env[69994]: INFO nova.compute.claims [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.239676] env[69994]: INFO nova.scheduler.client.report [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleted allocations for instance 91bb882c-7b84-450f-bd03-91ea1ce739ce [ 711.685729] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241505, 'name': RemoveSnapshot_Task} progress is 12%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.707039] env[69994]: DEBUG nova.compute.manager [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.707190] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b384a0-13f3-47fd-ad41-1a387ce560f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.747132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b6678466-a43b-4f12-a7b7-0cb1f63dba6b tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "91bb882c-7b84-450f-bd03-91ea1ce739ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.723s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.190674] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241505, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.218109] env[69994]: INFO nova.compute.manager [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] instance snapshotting [ 712.220811] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b7ea98-52d3-4c93-b218-f7a37d24c242 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.246198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae56a895-4d0c-4fde-9ff3-29bb0cce6c0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.688929] env[69994]: DEBUG oslo_vmware.api [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241505, 'name': RemoveSnapshot_Task, 'duration_secs': 1.816008} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.689227] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 712.689884] env[69994]: INFO nova.compute.manager [None req-dd54e2ca-4da3-499c-9e70-a4f89da61c7e tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Took 20.33 seconds to snapshot the instance on the hypervisor. 
[ 712.759537] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 712.760039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ad8998d3-d8cf-42fd-bc4d-87125280bf69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.767534] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 712.767534] env[69994]: value = "task-3241506" [ 712.767534] env[69994]: _type = "Task" [ 712.767534] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.778709] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241506, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.788072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b117e222-666d-41e3-b1e1-62be838c68ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.792050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ced4e8d-81a0-4020-911f-31937999e7ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.821806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473cdd74-cb4a-4f79-8a18-4d8914090374 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.829073] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c661a8b2-1f93-4b8f-ae66-3bf0710f2b9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.842974] env[69994]: DEBUG nova.compute.provider_tree [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.277988] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241506, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.346677] env[69994]: DEBUG nova.scheduler.client.report [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 713.384985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "dca638aa-c491-431f-a0e5-d02bd76705ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.385249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.778297] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241506, 'name': CreateSnapshot_Task, 'duration_secs': 0.82682} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.778550] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 713.779359] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3954881b-0929-4b9c-9571-4d8759813945 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.833585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "aeb7928a-8307-49e7-b019-a4c674e6369a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.834096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "aeb7928a-8307-49e7-b019-a4c674e6369a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.834382] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "aeb7928a-8307-49e7-b019-a4c674e6369a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.834588] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "aeb7928a-8307-49e7-b019-a4c674e6369a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.834758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "aeb7928a-8307-49e7-b019-a4c674e6369a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.837096] env[69994]: INFO nova.compute.manager [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Terminating instance [ 713.852886] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.853723] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 713.855638] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.274s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.857085] env[69994]: INFO nova.compute.claims [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.300956] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 714.301648] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-549b0beb-869f-4df0-af97-94498806b33b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.310785] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 714.310785] env[69994]: value = "task-3241507" [ 714.310785] env[69994]: _type = "Task" [ 714.310785] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.321141] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241507, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.343055] env[69994]: DEBUG nova.compute.manager [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 714.343055] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.343055] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157eb664-dac1-4bd0-9f2c-6565f2bf57ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.350447] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 714.350560] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e438773a-d309-4169-b604-8b7e96e7863a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.356208] env[69994]: DEBUG oslo_vmware.api [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 714.356208] env[69994]: value = "task-3241508" [ 714.356208] env[69994]: _type = "Task" [ 714.356208] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.362985] env[69994]: DEBUG nova.compute.utils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 714.370500] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 714.370932] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 714.372808] env[69994]: DEBUG oslo_vmware.api [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241508, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.433918] env[69994]: DEBUG nova.policy [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7048717682204eb59697716a973c356b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91fa55bf90ff43a8b255a1e2fa2c22be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 714.823646] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241507, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.826350] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Successfully created port: 5849490a-d0e4-4330-b929-ead7f7ce0fd7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.865819] env[69994]: DEBUG oslo_vmware.api [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241508, 'name': PowerOffVM_Task, 'duration_secs': 0.183817} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.866130] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 714.866352] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 714.866613] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5342ba4-298a-4efc-a289-76c3b0cc98d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.871055] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 714.939877] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 714.940284] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 714.940630] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Deleting the datastore file [datastore2] aeb7928a-8307-49e7-b019-a4c674e6369a {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 714.941042] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b09a0539-0bf1-4919-8408-d189311a7c5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.948291] env[69994]: DEBUG oslo_vmware.api [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for the task: (returnval){ [ 714.948291] env[69994]: value = "task-3241510" [ 714.948291] env[69994]: _type = "Task" [ 714.948291] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.956856] env[69994]: DEBUG oslo_vmware.api [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.326591] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241507, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.458741] env[69994]: DEBUG oslo_vmware.api [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Task: {'id': task-3241510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167876} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.461360] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 715.461709] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 715.462139] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.462457] env[69994]: INFO nova.compute.manager [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 715.462833] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.463381] env[69994]: DEBUG nova.compute.manager [-] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 715.463610] env[69994]: DEBUG nova.network.neutron [-] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 715.514397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0401a675-15c2-453c-8661-b4b8900725ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.522144] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5cf2e5-bdf1-4ec2-8f36-40e944a1a10f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.557207] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59cf563-8c47-410e-b543-262bffacdc61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.564756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f868c4-7c69-4e21-95f9-a43e3dfb1c17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.579360] env[69994]: DEBUG nova.compute.provider_tree [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.822658] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241507, 'name': CloneVM_Task, 'duration_secs': 1.207809} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.822923] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Created linked-clone VM from snapshot [ 715.823650] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5f46a3-ef52-4de7-81e8-33aaf40d4566 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.830613] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Uploading image 40fec83b-63f3-470e-97c8-2b8af2ced26e {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 715.856274] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 715.856274] env[69994]: value = "vm-647819" [ 715.856274] env[69994]: _type = "VirtualMachine" [ 715.856274] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 715.856539] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-febc78e3-e567-4e61-8017-28b1b25f0f1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.864477] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lease: (returnval){ [ 715.864477] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5240bafd-81b4-bdaf-a9cd-a310eafd1051" [ 715.864477] env[69994]: _type = "HttpNfcLease" [ 715.864477] env[69994]: } obtained for exporting VM: (result){ [ 715.864477] env[69994]: value = "vm-647819" [ 715.864477] env[69994]: _type = "VirtualMachine" [ 715.864477] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 715.865963] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the lease: (returnval){ [ 715.865963] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5240bafd-81b4-bdaf-a9cd-a310eafd1051" [ 715.865963] env[69994]: _type = "HttpNfcLease" [ 715.865963] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 715.870592] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 715.870592] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5240bafd-81b4-bdaf-a9cd-a310eafd1051" [ 715.870592] env[69994]: _type = "HttpNfcLease" [ 715.870592] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 715.883893] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.915735] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 715.915970] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.916213] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.916404] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.916548] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 715.916694] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 715.916898] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 715.917175] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 715.917255] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 715.917371] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 715.917536] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 715.918443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db305f66-2025-40ed-91e1-12bdc0bd291c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.926086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642759bf-6da8-4d34-a2d0-ee701c7b65a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.082866] env[69994]: DEBUG nova.scheduler.client.report [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 716.133863] env[69994]: DEBUG nova.compute.manager [req-e6b67643-60b1-42b7-bfe8-c3da4e4b989b req-2689602e-16de-4506-b512-d56a73029cd4 service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Received event network-vif-deleted-23ce404a-01b4-4000-91a5-8532d84ccfff {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.133863] env[69994]: INFO nova.compute.manager [req-e6b67643-60b1-42b7-bfe8-c3da4e4b989b req-2689602e-16de-4506-b512-d56a73029cd4 service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Neutron deleted interface 23ce404a-01b4-4000-91a5-8532d84ccfff; detaching it from the instance and deleting it from the info cache [ 716.133863] env[69994]: DEBUG nova.network.neutron [req-e6b67643-60b1-42b7-bfe8-c3da4e4b989b req-2689602e-16de-4506-b512-d56a73029cd4 service nova] [instance: 
aeb7928a-8307-49e7-b019-a4c674e6369a] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.374562] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 716.374562] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5240bafd-81b4-bdaf-a9cd-a310eafd1051" [ 716.374562] env[69994]: _type = "HttpNfcLease" [ 716.374562] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 716.375073] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 716.375073] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5240bafd-81b4-bdaf-a9cd-a310eafd1051" [ 716.375073] env[69994]: _type = "HttpNfcLease" [ 716.375073] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 716.376262] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e167ce1-25a8-4c20-a99a-84d287e63181 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.386487] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5279313f-f0c3-4e6b-49de-7dc4881e6438/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 716.386756] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5279313f-f0c3-4e6b-49de-7dc4881e6438/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 716.482848] env[69994]: DEBUG nova.network.neutron [-] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.487624] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-26b6e655-bdac-43c0-9927-f6eddc9065c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.589651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.590212] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 716.597016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 36.831s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.597016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.597016] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 716.597016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.305s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.599669] env[69994]: INFO nova.compute.claims [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 716.603282] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d340fc08-4e8b-439d-bded-94d99a87f852 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.614271] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27477c53-a776-481f-b93e-740f7d08f792 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.632601] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf982e8-b3d5-41c0-98dd-b39673f86fa9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.639858] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-219e1161-2a83-4a2c-b153-f11a61f44f3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.642086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd662ac5-923d-460b-9e4c-c04afb8ccb64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.678755] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bbdf1b-57df-4aa1-b066-1dcf7b8e19fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.691038] env[69994]: DEBUG 
nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178496MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 716.691238] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.712761] env[69994]: DEBUG nova.compute.manager [req-e6b67643-60b1-42b7-bfe8-c3da4e4b989b req-2689602e-16de-4506-b512-d56a73029cd4 service nova] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Detach interface failed, port_id=23ce404a-01b4-4000-91a5-8532d84ccfff, reason: Instance aeb7928a-8307-49e7-b019-a4c674e6369a could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 716.739896] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Successfully updated port: 5849490a-d0e4-4330-b929-ead7f7ce0fd7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.988149] env[69994]: INFO nova.compute.manager [-] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Took 1.52 seconds to deallocate network for instance. [ 717.104695] env[69994]: DEBUG nova.compute.utils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 717.106324] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 717.106511] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 717.174503] env[69994]: DEBUG nova.policy [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7048717682204eb59697716a973c356b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91fa55bf90ff43a8b255a1e2fa2c22be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 717.249351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "refresh_cache-493c2d85-eef5-44ae-acfc-2744685135ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.249351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "refresh_cache-493c2d85-eef5-44ae-acfc-2744685135ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.249351] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.495198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.568488] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Successfully created port: c71264f5-b3f6-418a-8777-e4ef4b9895e0 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.609536] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 717.795876] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.030142] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Updating instance_info_cache with network_info: [{"id": "5849490a-d0e4-4330-b929-ead7f7ce0fd7", "address": "fa:16:3e:5f:cd:2e", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5849490a-d0", "ovs_interfaceid": "5849490a-d0e4-4330-b929-ead7f7ce0fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.171090] env[69994]: DEBUG nova.compute.manager [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Received event network-vif-plugged-5849490a-d0e4-4330-b929-ead7f7ce0fd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 718.171301] env[69994]: DEBUG oslo_concurrency.lockutils [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] Acquiring lock "493c2d85-eef5-44ae-acfc-2744685135ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.171672] env[69994]: DEBUG oslo_concurrency.lockutils [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] Lock "493c2d85-eef5-44ae-acfc-2744685135ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.171935] env[69994]: DEBUG oslo_concurrency.lockutils [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] Lock "493c2d85-eef5-44ae-acfc-2744685135ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.172223] env[69994]: DEBUG nova.compute.manager [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] No waiting events found dispatching network-vif-plugged-5849490a-d0e4-4330-b929-ead7f7ce0fd7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 718.172489] env[69994]: WARNING nova.compute.manager [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Received unexpected event network-vif-plugged-5849490a-d0e4-4330-b929-ead7f7ce0fd7 for instance with vm_state building and task_state spawning. [ 718.172772] env[69994]: DEBUG nova.compute.manager [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Received event network-changed-5849490a-d0e4-4330-b929-ead7f7ce0fd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 718.172924] env[69994]: DEBUG nova.compute.manager [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Refreshing instance network info cache due to event network-changed-5849490a-d0e4-4330-b929-ead7f7ce0fd7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 718.173109] env[69994]: DEBUG oslo_concurrency.lockutils [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] Acquiring lock "refresh_cache-493c2d85-eef5-44ae-acfc-2744685135ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.302723] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c57da09-843d-4503-abf6-9a0e19a43f95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.311297] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd75fd26-8f65-4ba1-a53f-22277c1a1248 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.342957] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218207ad-524e-4b4d-b5d2-d2ed6e7246ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.350853] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6102ef-3e95-47d6-a508-d91e0b3ecbce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.364637] env[69994]: DEBUG nova.compute.provider_tree [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.531270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 
tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "refresh_cache-493c2d85-eef5-44ae-acfc-2744685135ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.531965] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Instance network_info: |[{"id": "5849490a-d0e4-4330-b929-ead7f7ce0fd7", "address": "fa:16:3e:5f:cd:2e", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5849490a-d0", "ovs_interfaceid": "5849490a-d0e4-4330-b929-ead7f7ce0fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 718.532186] env[69994]: DEBUG oslo_concurrency.lockutils [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] Acquired lock "refresh_cache-493c2d85-eef5-44ae-acfc-2744685135ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.532186] env[69994]: DEBUG nova.network.neutron [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Refreshing network info cache for port 5849490a-d0e4-4330-b929-ead7f7ce0fd7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 718.533472] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:cd:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5849490a-d0e4-4330-b929-ead7f7ce0fd7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.542467] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Creating folder: Project (91fa55bf90ff43a8b255a1e2fa2c22be). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.543027] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c706ab3-c892-4e7b-93ed-f98608b47d12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.554828] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Created folder: Project (91fa55bf90ff43a8b255a1e2fa2c22be) in parent group-v647729. [ 718.555046] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Creating folder: Instances. Parent ref: group-v647820. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.555288] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8901226a-ef91-465f-8cfe-854e07d497a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.564257] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Created folder: Instances in parent group-v647820. [ 718.564370] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.564562] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 718.564770] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ca42037-72aa-40ee-9725-f48620a90ded {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.586514] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.586514] env[69994]: value = "task-3241514" [ 718.586514] env[69994]: _type = "Task" [ 718.586514] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.594965] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241514, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.620228] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 718.647491] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 718.647826] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.647999] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 718.648201] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.648350] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 718.648498] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 718.648907] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 718.648907] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 718.649079] env[69994]: DEBUG nova.virt.hardware [None 
req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 718.649249] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 718.649422] env[69994]: DEBUG nova.virt.hardware [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 718.650361] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5496d502-76f4-495d-9b54-0fb4950e4765 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.659676] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4f8af9-cf7e-499d-ab7a-785ec373df9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.867606] env[69994]: DEBUG nova.scheduler.client.report [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.096704] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241514, 'name': CreateVM_Task, 'duration_secs': 0.354958} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.097184] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 719.097616] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.097847] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.098214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 719.098488] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e054894-906d-4a15-9caa-80ed660c7cca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.104367] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 719.104367] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527e72b4-8469-03a6-4d2b-5eecbd150a74" [ 719.104367] env[69994]: _type = "Task" [ 719.104367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.112819] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527e72b4-8469-03a6-4d2b-5eecbd150a74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.376018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.778s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.376018] env[69994]: DEBUG nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 719.377159] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.471s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.379188] env[69994]: INFO nova.compute.claims [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.509284] env[69994]: DEBUG nova.network.neutron [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Updated VIF entry in instance network info cache for port 5849490a-d0e4-4330-b929-ead7f7ce0fd7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 719.509284] env[69994]: DEBUG nova.network.neutron [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Updating instance_info_cache with network_info: [{"id": "5849490a-d0e4-4330-b929-ead7f7ce0fd7", "address": "fa:16:3e:5f:cd:2e", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5849490a-d0", "ovs_interfaceid": "5849490a-d0e4-4330-b929-ead7f7ce0fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.618575] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527e72b4-8469-03a6-4d2b-5eecbd150a74, 'name': SearchDatastore_Task, 'duration_secs': 0.010787} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.618575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.618575] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 719.618575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.618983] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.618983] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 719.618983] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-728aa386-6eed-4f9e-8696-7c837ca2829c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.625127] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Successfully updated port: c71264f5-b3f6-418a-8777-e4ef4b9895e0 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.627631] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 719.627986] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 719.628829] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe10815-fc08-4238-bfd0-a64c836621a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.635502] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 719.635502] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527a7bcc-6869-b1c5-c468-af42e066b609" [ 719.635502] env[69994]: _type = "Task" [ 719.635502] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.645512] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527a7bcc-6869-b1c5-c468-af42e066b609, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.885900] env[69994]: DEBUG nova.compute.utils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 719.890025] env[69994]: DEBUG nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 719.890025] env[69994]: DEBUG nova.network.neutron [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.970959] env[69994]: DEBUG nova.policy [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35d4b423225340caa7052af3c1d6a81f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dfa35e60b54941dfbfb8671758ccd039', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 720.011146] env[69994]: DEBUG oslo_concurrency.lockutils [req-1b30b870-2b5b-4f8f-9fc4-f0c4dfcf97ec req-d9b7c541-ac90-41e1-b7cd-54b6d9f96c97 service nova] Releasing lock "refresh_cache-493c2d85-eef5-44ae-acfc-2744685135ca" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.128318] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "refresh_cache-e8b4640f-302d-43cd-a654-c42f9cb34766" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.128318] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "refresh_cache-e8b4640f-302d-43cd-a654-c42f9cb34766" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.128318] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.150022] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527a7bcc-6869-b1c5-c468-af42e066b609, 'name': SearchDatastore_Task, 'duration_secs': 0.011419} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.150022] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcf09c74-52fb-4dda-a701-870173e5ed5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.155417] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 720.155417] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52377317-3111-76e6-af0b-b9f2a5bd1f1b" [ 720.155417] env[69994]: _type = "Task" [ 720.155417] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.164683] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52377317-3111-76e6-af0b-b9f2a5bd1f1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.223472] env[69994]: DEBUG nova.compute.manager [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Received event network-vif-plugged-c71264f5-b3f6-418a-8777-e4ef4b9895e0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 720.223472] env[69994]: DEBUG oslo_concurrency.lockutils [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] Acquiring lock "e8b4640f-302d-43cd-a654-c42f9cb34766-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.223472] env[69994]: DEBUG oslo_concurrency.lockutils [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.224345] env[69994]: DEBUG oslo_concurrency.lockutils [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.224702] env[69994]: DEBUG nova.compute.manager [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] No waiting events found dispatching network-vif-plugged-c71264f5-b3f6-418a-8777-e4ef4b9895e0 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 720.225041] env[69994]: WARNING nova.compute.manager [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Received unexpected event 
network-vif-plugged-c71264f5-b3f6-418a-8777-e4ef4b9895e0 for instance with vm_state building and task_state spawning. [ 720.225690] env[69994]: DEBUG nova.compute.manager [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Received event network-changed-c71264f5-b3f6-418a-8777-e4ef4b9895e0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 720.226020] env[69994]: DEBUG nova.compute.manager [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Refreshing instance network info cache due to event network-changed-c71264f5-b3f6-418a-8777-e4ef4b9895e0. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 720.226340] env[69994]: DEBUG oslo_concurrency.lockutils [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] Acquiring lock "refresh_cache-e8b4640f-302d-43cd-a654-c42f9cb34766" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.338238] env[69994]: DEBUG nova.network.neutron [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Successfully created port: 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.392259] env[69994]: DEBUG nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 720.665199] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52377317-3111-76e6-af0b-b9f2a5bd1f1b, 'name': SearchDatastore_Task, 'duration_secs': 0.009763} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.667864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.668154] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 493c2d85-eef5-44ae-acfc-2744685135ca/493c2d85-eef5-44ae-acfc-2744685135ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.668610] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a815340-3b09-495a-bfbd-5cf4da5a5e28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.675568] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 720.675568] env[69994]: value = "task-3241515" [ 720.675568] env[69994]: _type = "Task" [ 720.675568] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.684940] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241515, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.696126] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.954366] env[69994]: DEBUG nova.network.neutron [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Updating instance_info_cache with network_info: [{"id": "c71264f5-b3f6-418a-8777-e4ef4b9895e0", "address": "fa:16:3e:7d:ef:d9", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc71264f5-b3", "ovs_interfaceid": "c71264f5-b3f6-418a-8777-e4ef4b9895e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.031544] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdec776-4b0e-4d19-a4cc-831155bbe5dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.040204] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4d48c8-0a79-4c71-95c3-30286fe262a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.071318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab57e2b-cde6-45a7-9b28-9471388a4736 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.079455] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00da91c7-1821-4415-ad4c-4dc74a17db98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.093663] env[69994]: DEBUG nova.compute.provider_tree [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.184830] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241515, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458947} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.185088] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 493c2d85-eef5-44ae-acfc-2744685135ca/493c2d85-eef5-44ae-acfc-2744685135ca.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.185301] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.185550] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d94e466d-c81d-4011-9fa3-d55d57db85ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.191153] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 721.191153] env[69994]: value = "task-3241516" [ 721.191153] env[69994]: _type = "Task" [ 721.191153] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.199466] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241516, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.404905] env[69994]: DEBUG nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 721.428157] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 721.428450] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.428607] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 721.428788] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.428932] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 721.429098] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 721.429308] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 721.429465] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 721.429627] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 721.429789] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 721.429962] env[69994]: DEBUG nova.virt.hardware [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 721.430854] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeb1f0c-dac1-4a69-8c4f-fbcbc00eeeb8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.439440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1d16db-999d-4463-9864-645097145743 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.457199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "refresh_cache-e8b4640f-302d-43cd-a654-c42f9cb34766" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.457498] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Instance network_info: |[{"id": "c71264f5-b3f6-418a-8777-e4ef4b9895e0", "address": "fa:16:3e:7d:ef:d9", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc71264f5-b3", "ovs_interfaceid": "c71264f5-b3f6-418a-8777-e4ef4b9895e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 721.457832] env[69994]: DEBUG oslo_concurrency.lockutils [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] Acquired lock "refresh_cache-e8b4640f-302d-43cd-a654-c42f9cb34766" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.458030] env[69994]: DEBUG nova.network.neutron [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Refreshing network info cache for port c71264f5-b3f6-418a-8777-e4ef4b9895e0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.459278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:ef:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c71264f5-b3f6-418a-8777-e4ef4b9895e0', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 721.466635] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.467103] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 721.467912] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d57a928-77e2-4f1e-bebe-6a20408efa39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.487285] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 721.487285] env[69994]: value = "task-3241517" [ 721.487285] env[69994]: _type = "Task" [ 721.487285] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.495546] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241517, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.598593] env[69994]: DEBUG nova.scheduler.client.report [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.703388] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241516, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063765} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.703771] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 721.704724] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7206e29d-8a79-4819-af4f-71438c7c93a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.730844] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 493c2d85-eef5-44ae-acfc-2744685135ca/493c2d85-eef5-44ae-acfc-2744685135ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 721.731237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f406c4e-af9a-4351-9b19-afc735d4e3c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.752793] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 721.752793] env[69994]: value = "task-3241518" [ 721.752793] env[69994]: _type = "Task" [ 721.752793] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.761421] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241518, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.001431] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241517, 'name': CreateVM_Task, 'duration_secs': 0.369554} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.001639] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 722.002383] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.002588] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.003042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 722.003269] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce1e17f9-0c65-4e47-a228-496ace95ea3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.008961] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 722.008961] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5219a799-2905-7940-fa59-43aa1f906e97" [ 722.008961] env[69994]: _type = "Task" [ 722.008961] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.028624] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5219a799-2905-7940-fa59-43aa1f906e97, 'name': SearchDatastore_Task, 'duration_secs': 0.012441} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.028952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.029884] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 722.029884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.029884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.029884] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 722.030143] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9acd736-4aa5-4d3a-b26d-57c97c5e1204 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.043286] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 722.043453] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 722.044319] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85b35519-148a-4c68-a31b-e3c73b64db6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.050104] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 722.050104] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529c3d0f-6eee-94a6-9905-536de41f2ce1" [ 722.050104] env[69994]: _type = "Task" [ 722.050104] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.062911] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529c3d0f-6eee-94a6-9905-536de41f2ce1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.106073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.106073] env[69994]: DEBUG nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 722.108422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.994s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.108954] env[69994]: DEBUG nova.objects.instance [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lazy-loading 'resources' on Instance uuid e9bc15f9-e957-487f-b8d5-d1332b185dcf {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 722.251179] env[69994]: DEBUG nova.compute.manager [req-d101b9fc-5cbd-4bc6-ad4b-b5529c663673 req-b7d5e30e-d2d0-4d08-876f-bcacde74fb70 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received event network-vif-plugged-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 722.251179] env[69994]: DEBUG oslo_concurrency.lockutils [req-d101b9fc-5cbd-4bc6-ad4b-b5529c663673 req-b7d5e30e-d2d0-4d08-876f-bcacde74fb70 service nova] Acquiring lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.251179] env[69994]: DEBUG oslo_concurrency.lockutils [req-d101b9fc-5cbd-4bc6-ad4b-b5529c663673 req-b7d5e30e-d2d0-4d08-876f-bcacde74fb70 service nova] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.251179] env[69994]: DEBUG oslo_concurrency.lockutils [req-d101b9fc-5cbd-4bc6-ad4b-b5529c663673 req-b7d5e30e-d2d0-4d08-876f-bcacde74fb70 service nova] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.251179] env[69994]: DEBUG nova.compute.manager [req-d101b9fc-5cbd-4bc6-ad4b-b5529c663673 req-b7d5e30e-d2d0-4d08-876f-bcacde74fb70 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] No waiting events found dispatching network-vif-plugged-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 722.251544] env[69994]: WARNING nova.compute.manager [req-d101b9fc-5cbd-4bc6-ad4b-b5529c663673 req-b7d5e30e-d2d0-4d08-876f-bcacde74fb70 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received unexpected event network-vif-plugged-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 for instance with vm_state building and task_state spawning. [ 722.266289] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241518, 'name': ReconfigVM_Task, 'duration_secs': 0.30382} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.269342] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 493c2d85-eef5-44ae-acfc-2744685135ca/493c2d85-eef5-44ae-acfc-2744685135ca.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.269342] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4147961-0afa-497d-afaa-027cb1521143 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.276466] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 722.276466] env[69994]: value = "task-3241519" [ 722.276466] env[69994]: _type = "Task" [ 722.276466] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.285191] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241519, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.363660] env[69994]: DEBUG nova.network.neutron [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Successfully updated port: 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 722.443223] env[69994]: DEBUG nova.network.neutron [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Updated VIF entry in instance network info cache for port c71264f5-b3f6-418a-8777-e4ef4b9895e0. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 722.443223] env[69994]: DEBUG nova.network.neutron [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Updating instance_info_cache with network_info: [{"id": "c71264f5-b3f6-418a-8777-e4ef4b9895e0", "address": "fa:16:3e:7d:ef:d9", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc71264f5-b3", "ovs_interfaceid": "c71264f5-b3f6-418a-8777-e4ef4b9895e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.563188] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529c3d0f-6eee-94a6-9905-536de41f2ce1, 'name': SearchDatastore_Task, 'duration_secs': 0.01012} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.564566] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8845bd72-99e4-4792-961e-be73c9924ce7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.570903] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 722.570903] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ac9907-8ea9-60d2-d910-dc7c6eeae4b4" [ 722.570903] env[69994]: _type = "Task" [ 722.570903] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.579402] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac9907-8ea9-60d2-d910-dc7c6eeae4b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.612904] env[69994]: DEBUG nova.compute.utils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 722.617114] env[69994]: DEBUG nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 722.618049] env[69994]: DEBUG nova.network.neutron [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.656587] env[69994]: DEBUG nova.policy [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ccfdf4c5e604bb3a5eca0ac5727774c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5acf9a4a9344d4c9c91b75e83cf7a76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 722.787218] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241519, 'name': Rename_Task, 'duration_secs': 0.158702} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.790503] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 722.791134] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a2c1f64-bb6d-425c-9249-ff209d3f8b1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.798281] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 722.798281] env[69994]: value = "task-3241520" [ 722.798281] env[69994]: _type = "Task" [ 722.798281] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.810944] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241520, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.867072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.867072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquired lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.867072] env[69994]: DEBUG nova.network.neutron [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.944668] env[69994]: DEBUG oslo_concurrency.lockutils [req-7977d645-f712-453f-9a05-2bae312c33dd req-56fa7ea7-17dc-475b-be29-212bd2949594 service nova] Releasing lock "refresh_cache-e8b4640f-302d-43cd-a654-c42f9cb34766" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.051926] env[69994]: DEBUG nova.network.neutron [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Successfully created port: b7e8be98-685a-4d07-9440-e07af619b026 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.083815] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac9907-8ea9-60d2-d910-dc7c6eeae4b4, 'name': SearchDatastore_Task, 'duration_secs': 0.040096} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.084158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.084434] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e8b4640f-302d-43cd-a654-c42f9cb34766/e8b4640f-302d-43cd-a654-c42f9cb34766.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 723.084695] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64d2651f-91a6-4c3b-b24f-b723d5504288 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.092614] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 723.092614] env[69994]: value = "task-3241521" [ 723.092614] env[69994]: _type = "Task" [ 723.092614] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.100447] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241521, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.118093] env[69994]: DEBUG nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 723.193782] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d6d0c6-80ce-416e-bd56-76fcb2f64689 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.201438] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c3583f-a9a5-4f81-8657-a205bcff1d36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.242022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64aacf14-c087-4978-88d7-0bad17402d2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.247063] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44703fb-1c51-4586-b5f8-6630855b1542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.260476] env[69994]: DEBUG nova.compute.provider_tree [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 723.313077] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241520, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.515131] env[69994]: DEBUG nova.network.neutron [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.615653] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241521, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.791038] env[69994]: ERROR nova.scheduler.client.report [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] [req-1b777e8e-3c3d-43f1-8249-7ee34888f425] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1b777e8e-3c3d-43f1-8249-7ee34888f425"}]} [ 723.796850] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5279313f-f0c3-4e6b-49de-7dc4881e6438/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 723.797949] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d053dd0-eaa1-4d6e-b36d-573a361eec03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.810903] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5279313f-f0c3-4e6b-49de-7dc4881e6438/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 723.810903] env[69994]: ERROR oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5279313f-f0c3-4e6b-49de-7dc4881e6438/disk-0.vmdk due to incomplete transfer. 
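[editor's note] The 409 in the record above is placement's optimistic-concurrency check: the inventory PUT carried a stale resource-provider generation, so the server rejected it with "placement.concurrent_update", and the records that follow show the report client refreshing its cached inventories, aggregates and traits before retrying. A minimal sketch of that compare-and-swap retry pattern, using hypothetical get_provider()/put_inventory() helpers rather than Nova's actual report client or the real placement endpoints:

    import time

    def sync_inventory(client, provider_uuid, desired_inventory, max_attempts=4):
        """Retry an inventory update until the provider generation matches.

        `client` is assumed to expose get_provider() and put_inventory()
        returning requests-style responses; both are illustrative
        stand-ins, not the real Nova scheduler report client API.
        """
        for attempt in range(max_attempts):
            # Re-read the provider to pick up the current generation.
            provider = client.get_provider(provider_uuid)
            resp = client.put_inventory(
                provider_uuid,
                generation=provider["generation"],
                inventory=desired_inventory,
            )
            if resp.status_code == 200:
                return True
            if resp.status_code == 409:
                # Another writer bumped the generation; back off, refresh, retry.
                time.sleep(0.1 * (attempt + 1))
                continue
            resp.raise_for_status()
        return False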
[ 723.811433] env[69994]: DEBUG nova.network.neutron [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.817982] env[69994]: DEBUG nova.scheduler.client.report [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 723.820447] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-018e9a5b-644c-4c04-b572-277cf8f189ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.829117] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241520, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.830794] env[69994]: DEBUG oslo_vmware.rw_handles [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5279313f-f0c3-4e6b-49de-7dc4881e6438/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 723.830794] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Uploaded image 40fec83b-63f3-470e-97c8-2b8af2ced26e to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 723.832976] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 723.835949] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-eea1c6d3-be1b-4507-9f98-1b52c910dcd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.841851] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 723.841851] env[69994]: value = "task-3241522" [ 723.841851] env[69994]: _type = "Task" [ 723.841851] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.850394] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241522, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.851547] env[69994]: DEBUG nova.scheduler.client.report [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 723.851792] env[69994]: DEBUG nova.compute.provider_tree [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 723.865534] env[69994]: DEBUG nova.scheduler.client.report [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 723.891387] env[69994]: DEBUG nova.scheduler.client.report [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 724.102781] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.789968} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.103054] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e8b4640f-302d-43cd-a654-c42f9cb34766/e8b4640f-302d-43cd-a654-c42f9cb34766.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.103275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.103520] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eabb4b4e-a6b9-4855-9d8d-2159c841f726 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.112938] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 724.112938] env[69994]: value = "task-3241523" [ 724.112938] env[69994]: _type = "Task" [ 724.112938] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.125813] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241523, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.133331] env[69994]: DEBUG nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 724.156964] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 724.157230] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.157385] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.157564] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.157737] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.157909] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 724.158163] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 724.158352] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 724.158513] env[69994]: DEBUG nova.virt.hardware [None 
req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 724.158690] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 724.158854] env[69994]: DEBUG nova.virt.hardware [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 724.159718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95fb9dd-e04a-433e-9505-2d4260780abd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.170788] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865ec0a4-bedc-4b64-9067-3a33c54c19a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.310878] env[69994]: DEBUG nova.compute.manager [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.311190] env[69994]: DEBUG nova.compute.manager [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing instance network info cache due to event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 724.311443] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] Acquiring lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.317594] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241520, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.323464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Releasing lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.323737] env[69994]: DEBUG nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Instance network_info: |[{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 724.324082] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] Acquired lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.324320] env[69994]: DEBUG nova.network.neutron [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.325621] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:69:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18ed1bc7-f241-4d6e-83f9-4df1b8b70c45', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.333842] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 
tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Creating folder: Project (dfa35e60b54941dfbfb8671758ccd039). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.337495] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37d4cd6c-c7e9-4544-97cd-37f32ac25164 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.347936] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Created folder: Project (dfa35e60b54941dfbfb8671758ccd039) in parent group-v647729. [ 724.348098] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Creating folder: Instances. Parent ref: group-v647824. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.348385] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5f63acd-6889-49a3-a4d1-91978dc33dbc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.356093] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241522, 'name': Destroy_Task, 'duration_secs': 0.500237} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.357143] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Destroyed the VM [ 724.357438] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 724.357763] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8ad900fd-0f86-4918-99a8-62465bda8028 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.364763] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 724.364763] env[69994]: value = "task-3241526" [ 724.364763] env[69994]: _type = "Task" [ 724.364763] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.366215] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Created folder: Instances in parent group-v647824. 
[ 724.366463] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.372042] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.373229] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f73f3b3a-b314-4819-bcd2-9c3897a97296 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.393390] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241526, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.394703] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.394703] env[69994]: value = "task-3241527" [ 724.394703] env[69994]: _type = "Task" [ 724.394703] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.403444] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241527, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.480775] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e71318d-79be-4f37-a418-cd48e962a5fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.488952] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00f8be0-aaee-42fb-bad0-db3ca1b7d87e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.525752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be71555c-6130-4fa8-9303-3c4a9ba05c62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.533511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf81715-3a77-413b-8c53-59ad613d2b66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.547889] env[69994]: DEBUG nova.compute.provider_tree [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 724.622562] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241523, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149211} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.622838] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 724.623699] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a95014d-8e31-4c41-921f-2839192da8e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.647264] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] e8b4640f-302d-43cd-a654-c42f9cb34766/e8b4640f-302d-43cd-a654-c42f9cb34766.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 724.647594] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ef99f08-d039-4cda-96f8-51ada281e231 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.668611] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 724.668611] env[69994]: value = "task-3241528" [ 724.668611] env[69994]: _type = "Task" [ 724.668611] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.676803] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241528, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.814046] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241520, 'name': PowerOnVM_Task, 'duration_secs': 1.56602} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.814333] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.814494] env[69994]: INFO nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Took 8.93 seconds to spawn the instance on the hypervisor. [ 724.814672] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.815467] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd258dc-5428-434c-9236-039f4bbb196a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.876951] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241526, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.904236] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241527, 'name': CreateVM_Task, 'duration_secs': 0.372555} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.904476] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.905165] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.905339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.905722] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 724.905990] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b3135d2-a57f-41e9-8fe5-51f5eda8a317 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.912172] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 724.912172] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ea49ff-de1d-1850-4467-db275303e443" [ 724.912172] env[69994]: _type = "Task" [ 724.912172] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.920897] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ea49ff-de1d-1850-4467-db275303e443, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.095105] env[69994]: DEBUG nova.scheduler.client.report [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 725.095105] env[69994]: DEBUG nova.compute.provider_tree [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 61 to 62 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 725.095293] env[69994]: DEBUG nova.compute.provider_tree [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 725.180647] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241528, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.281349] env[69994]: DEBUG nova.network.neutron [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updated VIF entry in instance network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.281806] env[69994]: DEBUG nova.network.neutron [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.334116] env[69994]: INFO nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Took 49.43 seconds to build instance. [ 725.376014] env[69994]: DEBUG oslo_vmware.api [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241526, 'name': RemoveSnapshot_Task, 'duration_secs': 1.004188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.376284] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 725.379587] env[69994]: INFO nova.compute.manager [None req-db9a6674-0560-4830-8810-6f77c97d7fb9 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Took 13.16 seconds to snapshot the instance on the hypervisor. 
[ 725.392241] env[69994]: DEBUG nova.network.neutron [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Successfully updated port: b7e8be98-685a-4d07-9440-e07af619b026 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.424706] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ea49ff-de1d-1850-4467-db275303e443, 'name': SearchDatastore_Task, 'duration_secs': 0.036757} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.425060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.425269] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 725.425506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.425655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.425833] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 725.426107] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-447ce0ae-37be-4bee-afd0-8da83e0f0667 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.440678] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 725.440678] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 725.441310] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f69dbfea-2847-457e-bd27-5e3de7e7a80d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.446801] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 725.446801] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52485f45-9bf8-2b3c-f229-54c87d1aa77a" [ 725.446801] env[69994]: _type = "Task" [ 725.446801] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.454337] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52485f45-9bf8-2b3c-f229-54c87d1aa77a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.603769] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.495s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.608369] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.657s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.608577] env[69994]: DEBUG nova.objects.instance [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lazy-loading 'resources' on Instance uuid 84bff4c0-9e2e-47f2-a378-70d3c992b58b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 725.631057] env[69994]: INFO nova.scheduler.client.report [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Deleted allocations for instance e9bc15f9-e957-487f-b8d5-d1332b185dcf [ 725.679685] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241528, 'name': ReconfigVM_Task, 'duration_secs': 0.635028} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.680945] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Reconfigured VM instance instance-0000001f to attach disk [datastore1] e8b4640f-302d-43cd-a654-c42f9cb34766/e8b4640f-302d-43cd-a654-c42f9cb34766.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.681408] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39bb29ec-3ad0-45d9-b888-105f03929640 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.689478] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 725.689478] env[69994]: value = "task-3241529" [ 725.689478] env[69994]: _type = "Task" [ 725.689478] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.697798] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241529, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.785071] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8d86a73-3862-4deb-8c9b-b457f4c61d66 req-b52dfa13-d118-4f9e-9fb0-01533fff4ac5 service nova] Releasing lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.836671] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "493c2d85-eef5-44ae-acfc-2744685135ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.157s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.898102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "refresh_cache-87473dd1-458d-4ef4-a1bd-7e653e509ea4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.898102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "refresh_cache-87473dd1-458d-4ef4-a1bd-7e653e509ea4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.898102] env[69994]: DEBUG nova.network.neutron [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Building network info cache for instance 
{{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.957578] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52485f45-9bf8-2b3c-f229-54c87d1aa77a, 'name': SearchDatastore_Task, 'duration_secs': 0.034797} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.958504] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f69fba11-f82f-4929-baaa-b94bbe53de29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.963927] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 725.963927] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b872ae-ed92-1d2f-86cc-4ff3b31cc06e" [ 725.963927] env[69994]: _type = "Task" [ 725.963927] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.972396] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b872ae-ed92-1d2f-86cc-4ff3b31cc06e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.137867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86206834-a523-4bc3-959e-7df98cb6991d tempest-ServerShowV247Test-435710788 tempest-ServerShowV247Test-435710788-project-member] Lock "e9bc15f9-e957-487f-b8d5-d1332b185dcf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.381s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.202246] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241529, 'name': Rename_Task, 'duration_secs': 0.31803} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.204540] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 726.204929] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9644b382-7268-4d8f-9f6d-2563f0555d28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.211125] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 726.211125] env[69994]: value = "task-3241530" [ 726.211125] env[69994]: _type = "Task" [ 726.211125] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.221878] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.339099] env[69994]: DEBUG nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 726.363961] env[69994]: DEBUG nova.compute.manager [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Received event network-vif-plugged-b7e8be98-685a-4d07-9440-e07af619b026 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.364233] env[69994]: DEBUG oslo_concurrency.lockutils [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] Acquiring lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.364438] env[69994]: DEBUG oslo_concurrency.lockutils [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.364574] env[69994]: DEBUG oslo_concurrency.lockutils [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.364697] env[69994]: DEBUG nova.compute.manager [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] No waiting events found dispatching network-vif-plugged-b7e8be98-685a-4d07-9440-e07af619b026 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 726.364862] env[69994]: WARNING nova.compute.manager [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Received unexpected event network-vif-plugged-b7e8be98-685a-4d07-9440-e07af619b026 for instance with vm_state building and task_state spawning. [ 726.365027] env[69994]: DEBUG nova.compute.manager [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Received event network-changed-b7e8be98-685a-4d07-9440-e07af619b026 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.365185] env[69994]: DEBUG nova.compute.manager [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Refreshing instance network info cache due to event network-changed-b7e8be98-685a-4d07-9440-e07af619b026. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 726.365350] env[69994]: DEBUG oslo_concurrency.lockutils [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] Acquiring lock "refresh_cache-87473dd1-458d-4ef4-a1bd-7e653e509ea4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.453343] env[69994]: DEBUG nova.network.neutron [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.476514] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b872ae-ed92-1d2f-86cc-4ff3b31cc06e, 'name': SearchDatastore_Task, 'duration_secs': 0.012256} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.476872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.477031] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/ce6f9a88-faa8-442e-8b48-64979dd2d03e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 726.479561] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa3b7664-1422-4b91-8e35-5cff79466116 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.486594] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 726.486594] env[69994]: value = "task-3241531" [ 726.486594] env[69994]: _type = "Task" [ 726.486594] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.498263] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241531, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.643690] env[69994]: DEBUG nova.network.neutron [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Updating instance_info_cache with network_info: [{"id": "b7e8be98-685a-4d07-9440-e07af619b026", "address": "fa:16:3e:0a:5e:32", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7e8be98-68", "ovs_interfaceid": "b7e8be98-685a-4d07-9440-e07af619b026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.673118] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070cb52b-758b-4d24-b09d-5d9345eeaa4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.681715] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fc82b1-7700-4535-b271-fb972e460f65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.716082] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0611ed4-f3c0-48a2-8ce0-16a4e9672f11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.728814] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995fd8dc-b1c5-45a3-964a-ed81c27685f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.732879] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241530, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.743744] env[69994]: DEBUG nova.compute.provider_tree [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.859721] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.899899] env[69994]: DEBUG nova.compute.manager [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 726.899899] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6598e1ce-9d1a-46c4-96fc-2931cbf8e06c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.996840] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241531, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.149267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "refresh_cache-87473dd1-458d-4ef4-a1bd-7e653e509ea4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.149601] env[69994]: DEBUG nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance network_info: |[{"id": "b7e8be98-685a-4d07-9440-e07af619b026", "address": "fa:16:3e:0a:5e:32", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7e8be98-68", "ovs_interfaceid": "b7e8be98-685a-4d07-9440-e07af619b026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 727.149916] env[69994]: DEBUG oslo_concurrency.lockutils [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] Acquired lock "refresh_cache-87473dd1-458d-4ef4-a1bd-7e653e509ea4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.150176] env[69994]: DEBUG nova.network.neutron [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Refreshing network info cache for port b7e8be98-685a-4d07-9440-e07af619b026 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.151326] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:5e:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c66a277b-e3bf-43b8-a632-04fdd0720b91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7e8be98-685a-4d07-9440-e07af619b026', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.158857] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] 
Creating folder: Project (e5acf9a4a9344d4c9c91b75e83cf7a76). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.159368] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49fb9188-7149-45dc-b7c9-30b9169ed842 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.170888] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created folder: Project (e5acf9a4a9344d4c9c91b75e83cf7a76) in parent group-v647729. [ 727.171034] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Creating folder: Instances. Parent ref: group-v647827. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.171267] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19e8c0c1-68e4-4b6d-bf3a-fc5e4bab26f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.181027] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created folder: Instances in parent group-v647827. [ 727.182079] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 727.182079] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.182079] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76d5a0ee-e3cb-40c5-8ae7-f4f8a688a898 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.204525] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.204525] env[69994]: value = "task-3241534" [ 727.204525] env[69994]: _type = "Task" [ 727.204525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.212566] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241534, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.219958] env[69994]: DEBUG oslo_vmware.api [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241530, 'name': PowerOnVM_Task, 'duration_secs': 0.545478} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.220228] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.220430] env[69994]: INFO nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Took 8.60 seconds to spawn the instance on the hypervisor. [ 727.220610] env[69994]: DEBUG nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 727.221886] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad26b89-1e78-4b24-89b5-5ab027d4cbe2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.247431] env[69994]: DEBUG nova.scheduler.client.report [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 727.411589] env[69994]: INFO nova.compute.manager [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] instance snapshotting [ 727.414387] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a1fd01-ba06-41f2-aa95-0b44ee0daffe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.435358] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d550eb8-bb34-4481-a31e-c8ce080de668 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.499303] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241531, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523099} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.499675] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/ce6f9a88-faa8-442e-8b48-64979dd2d03e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.499759] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.499987] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99d93d32-9168-4511-bcf3-23f3b9d0fc9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.506365] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 727.506365] env[69994]: value = "task-3241535" [ 727.506365] env[69994]: _type = "Task" [ 727.506365] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.513850] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241535, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.714129] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241534, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.739416] env[69994]: INFO nova.compute.manager [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Took 50.19 seconds to build instance. 
[ 727.756093] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.762976] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.575s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.762976] env[69994]: INFO nova.compute.claims [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.790857] env[69994]: INFO nova.scheduler.client.report [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Deleted allocations for instance 84bff4c0-9e2e-47f2-a378-70d3c992b58b [ 727.887661] env[69994]: DEBUG nova.network.neutron [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Updated VIF entry in instance network info cache for port b7e8be98-685a-4d07-9440-e07af619b026. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 727.888048] env[69994]: DEBUG nova.network.neutron [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Updating instance_info_cache with network_info: [{"id": "b7e8be98-685a-4d07-9440-e07af619b026", "address": "fa:16:3e:0a:5e:32", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7e8be98-68", "ovs_interfaceid": "b7e8be98-685a-4d07-9440-e07af619b026", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.949604] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 
tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 727.949910] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e81684d0-1caa-467c-9179-fbb7736ef37e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.957971] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 727.957971] env[69994]: value = "task-3241536" [ 727.957971] env[69994]: _type = "Task" [ 727.957971] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.965907] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241536, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.015639] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120474} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.015890] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.016708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68fa381-b2b3-4d58-a2b8-91b539ff127e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.041343] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/ce6f9a88-faa8-442e-8b48-64979dd2d03e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.041634] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3ad2962-e12a-42fd-92d5-d16631fef9fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.061672] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 728.061672] env[69994]: value = "task-3241537" [ 728.061672] env[69994]: _type = "Task" [ 
728.061672] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.069448] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241537, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.214508] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241534, 'name': CreateVM_Task, 'duration_secs': 0.55799} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.214680] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 728.215377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.215542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.215890] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 728.216164] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2808fbb6-5140-43c3-87c8-a919aba25805 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.220811] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 728.220811] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525aa312-c7f5-a850-e187-59351ab01615" [ 728.220811] env[69994]: _type = "Task" [ 728.220811] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.228406] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525aa312-c7f5-a850-e187-59351ab01615, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.243225] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3684357d-e990-4512-ae33-eca74bf4721d tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.536s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.297937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d56f690-5a19-48c2-87a9-1740fed61a6a tempest-ServerExternalEventsTest-1345251388 tempest-ServerExternalEventsTest-1345251388-project-member] Lock "84bff4c0-9e2e-47f2-a378-70d3c992b58b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.801s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.390375] env[69994]: DEBUG oslo_concurrency.lockutils [req-1435431b-c84f-4b21-8e59-e6449863dad7 req-419e3a25-e260-40a7-bc2c-17982f5470f4 service nova] Releasing lock "refresh_cache-87473dd1-458d-4ef4-a1bd-7e653e509ea4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.471349] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241536, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.576019] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241537, 'name': ReconfigVM_Task, 'duration_secs': 0.331124} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.576334] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Reconfigured VM instance instance-00000020 to attach disk [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/ce6f9a88-faa8-442e-8b48-64979dd2d03e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.576974] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4e6cec1-5fa3-49a2-9bc2-96e77f54dc29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.585950] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 728.585950] env[69994]: value = "task-3241538" [ 728.585950] env[69994]: _type = "Task" [ 728.585950] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.595843] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241538, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.735748] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525aa312-c7f5-a850-e187-59351ab01615, 'name': SearchDatastore_Task, 'duration_secs': 0.010183} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.736230] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.739023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.739023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.739023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.739023] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 728.739023] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8702886d-41d7-4093-a2d3-2088733aab6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.744323] env[69994]: DEBUG nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 728.751972] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 728.752215] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 728.753007] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-944069fb-937c-4026-ade7-1127d21b022e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.763166] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 728.763166] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c10539-36f8-6fe0-4c36-2a1707a33b01" [ 728.763166] env[69994]: _type = "Task" [ 728.763166] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.777827] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c10539-36f8-6fe0-4c36-2a1707a33b01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.969426] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241536, 'name': CreateSnapshot_Task, 'duration_secs': 0.546344} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.977489] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 728.980198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d265ec2-4c5a-426d-8c8e-cf07f0462273 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.103076] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241538, 'name': Rename_Task, 'duration_secs': 0.143806} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.107150] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.107150] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a55e8c93-77c2-47b9-9d96-2d9464176043 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.117620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "e8b4640f-302d-43cd-a654-c42f9cb34766" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.117918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.118171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "e8b4640f-302d-43cd-a654-c42f9cb34766-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.118372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.118752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.121039] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 729.121039] env[69994]: value = "task-3241539" [ 729.121039] env[69994]: _type = "Task" [ 729.121039] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.121481] env[69994]: INFO nova.compute.manager [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Terminating instance [ 729.137149] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241539, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.283186] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.289238] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c10539-36f8-6fe0-4c36-2a1707a33b01, 'name': SearchDatastore_Task, 'duration_secs': 0.024595} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.289728] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89d0a510-f12c-45e7-8422-5ae7eccdb4fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.296400] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 729.296400] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5289d34d-44a4-b547-0f81-ae5d7703d88b" [ 729.296400] env[69994]: _type = "Task" [ 729.296400] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.307147] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5289d34d-44a4-b547-0f81-ae5d7703d88b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.323395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "493c2d85-eef5-44ae-acfc-2744685135ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.324406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "493c2d85-eef5-44ae-acfc-2744685135ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.324406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "493c2d85-eef5-44ae-acfc-2744685135ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.324406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "493c2d85-eef5-44ae-acfc-2744685135ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.324406] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "493c2d85-eef5-44ae-acfc-2744685135ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.327108] env[69994]: INFO nova.compute.manager [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Terminating instance [ 729.436739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f03236d-197e-4996-a484-fa72be6ae8cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.445339] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0a8a64-f34b-44be-a50a-1a39123d7d97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.477132] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e345bc20-c978-4cc3-a8c5-28e4b332ce41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.486369] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d564fe2-524e-4388-b43c-fd8b11c22d68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.510494] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 729.514020] env[69994]: DEBUG nova.compute.provider_tree [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.517032] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-13bee7aa-746f-4b56-90a1-931e9d286d2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.525596] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 729.525596] env[69994]: value = "task-3241540" [ 729.525596] env[69994]: _type = "Task" [ 729.525596] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.535355] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241540, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.632218] env[69994]: DEBUG nova.compute.manager [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 729.632436] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.635794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06251d7e-e131-488d-be66-43e6d570fb07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.646463] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 729.646788] env[69994]: DEBUG oslo_vmware.api [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241539, 'name': PowerOnVM_Task, 'duration_secs': 0.490665} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.648025] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3485e5af-f324-4995-a9f1-d058667bec36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.648682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 729.648904] env[69994]: INFO nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Took 8.24 seconds to spawn the instance on the hypervisor. [ 729.650425] env[69994]: DEBUG nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 729.651250] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a075ed13-29d3-4be1-9ddb-a1d735b1a9bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.668580] env[69994]: DEBUG oslo_vmware.api [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 729.668580] env[69994]: value = "task-3241541" [ 729.668580] env[69994]: _type = "Task" [ 729.668580] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.678932] env[69994]: DEBUG oslo_vmware.api [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.814120] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5289d34d-44a4-b547-0f81-ae5d7703d88b, 'name': SearchDatastore_Task, 'duration_secs': 0.009853} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.814372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.814559] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 729.816777] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cd0ff38-2aa3-44a0-95e9-2634f2427ec6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.822343] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 729.822343] env[69994]: value = "task-3241542" [ 729.822343] env[69994]: _type = "Task" [ 729.822343] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.831637] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241542, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.835608] env[69994]: DEBUG nova.compute.manager [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 729.835608] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.835608] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ac64a6-62f7-464f-bd96-6c0ade744a7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.843460] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 729.843585] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a5f523c-ecdf-4374-9d16-6681aab5ae5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.852835] env[69994]: DEBUG oslo_vmware.api [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 729.852835] env[69994]: value = "task-3241543" [ 729.852835] env[69994]: _type = "Task" [ 729.852835] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.863816] env[69994]: DEBUG oslo_vmware.api [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241543, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.020011] env[69994]: DEBUG nova.scheduler.client.report [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.041532] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241540, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.186837] env[69994]: DEBUG oslo_vmware.api [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241541, 'name': PowerOffVM_Task, 'duration_secs': 0.253793} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.190288] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 730.190288] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 730.190788] env[69994]: INFO nova.compute.manager [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Took 48.93 seconds to build instance. [ 730.191783] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b43769c5-fbba-4341-9e0e-4b278945d6a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.341373] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241542, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.366542] env[69994]: DEBUG oslo_vmware.api [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241543, 'name': PowerOffVM_Task, 'duration_secs': 0.163658} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.367358] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 730.368236] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 730.371588] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99da6df0-efdb-4163-ae39-e852cf3e88c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.526366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.526911] env[69994]: DEBUG nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 730.531190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.797s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.531190] env[69994]: DEBUG nova.objects.instance [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lazy-loading 'resources' on Instance uuid ad957c30-c923-4bbf-8841-00e99de44781 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.541488] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241540, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.694997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc3cc296-67e3-423a-becf-f916830c0fb0 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.641s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.836410] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564946} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.836683] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 730.836899] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 730.837178] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e68eb58-355b-4171-b92c-aedbc6bb8c07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.844629] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 730.844629] env[69994]: value = "task-3241546" [ 730.844629] env[69994]: _type = "Task" [ 730.844629] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.852229] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241546, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.950016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.950273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.039934] env[69994]: DEBUG nova.compute.utils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 731.041432] env[69994]: DEBUG nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 731.046136] env[69994]: DEBUG nova.network.neutron [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.048882] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241540, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.132853] env[69994]: DEBUG nova.policy [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ccfdf4c5e604bb3a5eca0ac5727774c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5acf9a4a9344d4c9c91b75e83cf7a76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 731.145326] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.145521] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.145713] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleting the datastore file [datastore1] e8b4640f-302d-43cd-a654-c42f9cb34766 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.146017] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efc69c36-d1b0-4136-9858-1731ec890ee4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.154287] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.154483] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.154658] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleting the datastore file [datastore1] 493c2d85-eef5-44ae-acfc-2744685135ca {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.156739] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-612a9417-d894-4961-8535-34f6e3b57f7c {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.157804] env[69994]: INFO nova.compute.manager [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Rescuing [ 731.158054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.158335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquired lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.158379] env[69994]: DEBUG nova.network.neutron [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.160535] env[69994]: DEBUG oslo_vmware.api [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 731.160535] env[69994]: value = "task-3241547" [ 731.160535] env[69994]: _type = "Task" [ 731.160535] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.166956] env[69994]: DEBUG oslo_vmware.api [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 731.166956] env[69994]: value = "task-3241548" [ 731.166956] env[69994]: _type = "Task" [ 731.166956] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.178683] env[69994]: DEBUG oslo_vmware.api [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.184240] env[69994]: DEBUG oslo_vmware.api [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.198238] env[69994]: DEBUG nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 731.361407] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241546, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067176} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.361695] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.362502] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6100e03-06d0-4676-8920-d01add36e443 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.390338] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.392768] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06b1a094-1e16-4fad-b647-5a6be203858d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.414857] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 731.414857] env[69994]: value = "task-3241549" [ 731.414857] env[69994]: _type = "Task" [ 731.414857] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.427042] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241549, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.545096] env[69994]: DEBUG nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 731.547643] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241540, 'name': CloneVM_Task, 'duration_secs': 1.709412} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.550591] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Created linked-clone VM from snapshot [ 731.551570] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b48df3-2f0f-487e-a9e9-57f46c7cd3fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.560452] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Uploading image 148b7a39-161a-4c6d-a856-8b0f6909f0dd {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 731.590351] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 731.590351] env[69994]: value = "vm-647831" [ 731.590351] env[69994]: _type = "VirtualMachine" [ 731.590351] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 731.590723] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ffefe3ae-4a85-4482-be0f-0378e3e9f433 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.597579] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lease: (returnval){ [ 731.597579] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42c14-c046-0285-98e1-23cfb0bb5681" [ 731.597579] env[69994]: _type = "HttpNfcLease" [ 731.597579] env[69994]: } obtained for exporting VM: (result){ [ 731.597579] env[69994]: value = "vm-647831" [ 731.597579] env[69994]: _type = "VirtualMachine" [ 731.597579] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 731.597847] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the lease: (returnval){ [ 731.597847] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42c14-c046-0285-98e1-23cfb0bb5681" [ 731.597847] env[69994]: _type = "HttpNfcLease" [ 731.597847] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 731.605501] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 731.605501] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42c14-c046-0285-98e1-23cfb0bb5681" [ 731.605501] env[69994]: _type = "HttpNfcLease" [ 731.605501] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 731.679921] env[69994]: DEBUG oslo_vmware.api [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192918} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.681654] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.681925] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.682924] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.682924] env[69994]: INFO nova.compute.manager [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Took 2.05 seconds to destroy the instance on the hypervisor. [ 731.682924] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 731.682924] env[69994]: DEBUG oslo_vmware.api [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186554} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.690727] env[69994]: DEBUG nova.compute.manager [-] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 731.690727] env[69994]: DEBUG nova.network.neutron [-] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.690850] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 731.691018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 731.691193] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 731.691358] env[69994]: INFO nova.compute.manager [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Took 1.86 seconds to destroy the instance on the hypervisor. [ 731.691580] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 731.692177] env[69994]: DEBUG nova.compute.manager [-] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 731.692572] env[69994]: DEBUG nova.network.neutron [-] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.723261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.792858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6995bc9-0c2d-4222-b48f-fa7ca18611b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.803824] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06330bf8-4529-44b4-948e-6d90086d2cf7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.835348] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82f94a0-77e5-41bb-a85d-07313fa71f19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.843093] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f387f6-f01a-4625-ada0-9e1b3915a2cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.858533] env[69994]: DEBUG nova.compute.provider_tree [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.926026] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241549, 'name': ReconfigVM_Task, 'duration_secs': 0.274097} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.927648] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 731.928347] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8aebf4e-4492-4413-af2f-ac5581d580a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.935295] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 731.935295] env[69994]: value = "task-3241551" [ 731.935295] env[69994]: _type = "Task" [ 731.935295] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.943550] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241551, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.998033] env[69994]: DEBUG nova.network.neutron [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Successfully created port: 601a8fe0-5f6c-4f29-860b-193a32a1e99c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 732.107613] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 732.107613] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42c14-c046-0285-98e1-23cfb0bb5681" [ 732.107613] env[69994]: _type = "HttpNfcLease" [ 732.107613] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 732.109345] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 732.109345] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42c14-c046-0285-98e1-23cfb0bb5681" [ 732.109345] env[69994]: _type = "HttpNfcLease" [ 732.109345] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 732.110833] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6352c201-177a-4a94-ab38-291e2da006d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.119344] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293b9ce-a056-9512-3415-07d462ecd9e0/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 732.119615] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293b9ce-a056-9512-3415-07d462ecd9e0/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 732.185372] env[69994]: DEBUG nova.compute.manager [req-7aec19b7-ca21-4ee4-acc3-20345b63a804 req-c3944c0f-f35c-4b0e-a38c-6f4605868aa8 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Received event network-vif-deleted-c71264f5-b3f6-418a-8777-e4ef4b9895e0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.185675] env[69994]: INFO nova.compute.manager [req-7aec19b7-ca21-4ee4-acc3-20345b63a804 req-c3944c0f-f35c-4b0e-a38c-6f4605868aa8 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Neutron deleted interface c71264f5-b3f6-418a-8777-e4ef4b9895e0; detaching it from the instance and deleting it from the info cache [ 732.185908] env[69994]: DEBUG nova.network.neutron [req-7aec19b7-ca21-4ee4-acc3-20345b63a804 req-c3944c0f-f35c-4b0e-a38c-6f4605868aa8 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.217526] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-28256ae3-fdd8-4336-af45-9f4fecd3b718 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.275608] env[69994]: DEBUG nova.network.neutron [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.362033] env[69994]: DEBUG nova.scheduler.client.report [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 732.447845] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241551, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.566025] env[69994]: DEBUG nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 732.599595] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 732.600944] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.601181] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.601445] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.601661] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.601886] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 732.602222] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 732.602386] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 732.602580] env[69994]: DEBUG nova.virt.hardware [None 
req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 732.602736] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 732.602923] env[69994]: DEBUG nova.virt.hardware [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 732.604069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b5e4b1-65c9-46dc-9b68-5f51018f3218 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.608261] env[69994]: DEBUG nova.network.neutron [-] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.610132] env[69994]: DEBUG nova.network.neutron [-] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.615290] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26266e15-bd5b-433b-918f-fc964ed2684d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.690477] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bbf19991-ed11-4185-bd82-b0d60fb662e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.703407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0beb88f8-fba6-46e9-b6aa-fb9d679d9c27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.739871] env[69994]: DEBUG nova.compute.manager [req-7aec19b7-ca21-4ee4-acc3-20345b63a804 req-c3944c0f-f35c-4b0e-a38c-6f4605868aa8 service nova] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Detach interface failed, port_id=c71264f5-b3f6-418a-8777-e4ef4b9895e0, reason: Instance e8b4640f-302d-43cd-a654-c42f9cb34766 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 732.781403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Releasing lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.867476] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.337s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.869922] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.851s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.883774] env[69994]: INFO nova.compute.claims [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.935138] env[69994]: INFO nova.scheduler.client.report [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleted allocations for instance ad957c30-c923-4bbf-8841-00e99de44781 [ 732.955314] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241551, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.110938] env[69994]: INFO nova.compute.manager [-] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Took 1.42 seconds to deallocate network for instance. [ 733.117525] env[69994]: INFO nova.compute.manager [-] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Took 1.42 seconds to deallocate network for instance. [ 733.458543] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241551, 'name': Rename_Task, 'duration_secs': 1.171158} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.458850] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.459148] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-008eb30c-f362-4542-b486-76783b1a8343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.462503] env[69994]: DEBUG oslo_concurrency.lockutils [None req-354c995b-f30f-4d82-86f7-441e948afa00 tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "ad957c30-c923-4bbf-8841-00e99de44781" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.800s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.469590] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 733.469590] env[69994]: value = "task-3241552" [ 733.469590] env[69994]: _type = "Task" [ 733.469590] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.484017] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241552, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.619591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.623698] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.840108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Acquiring lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.840516] env[69994]: DEBUG oslo_concurrency.lockutils [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.985122] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241552, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.318126] env[69994]: DEBUG nova.network.neutron [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Successfully updated port: 601a8fe0-5f6c-4f29-860b-193a32a1e99c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 734.339643] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 734.340575] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d3cd80a-7663-471a-bbcd-f70eb051a486 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.343510] env[69994]: DEBUG nova.compute.utils [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 734.350100] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 734.350100] env[69994]: value = "task-3241553" [ 734.350100] env[69994]: _type = "Task" [ 734.350100] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.361147] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241553, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.482040] env[69994]: DEBUG oslo_vmware.api [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241552, 'name': PowerOnVM_Task, 'duration_secs': 0.722303} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.482210] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.482433] env[69994]: INFO nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Took 10.35 seconds to spawn the instance on the hypervisor. 
[ 734.482649] env[69994]: DEBUG nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 734.486666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a68e819-2b3c-47a1-b044-7186528186da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.560043] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbd4abb-366d-4a92-9656-6f8a6a8a5d11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.569662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51389cb3-0ea8-48d6-9189-219c0c450faa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.608472] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684743dd-7373-4f01-896d-432a90d93543 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.616220] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abcb11a-f6f3-4b94-ae54-bc798f2174f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.630834] env[69994]: DEBUG nova.compute.provider_tree [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.829790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "refresh_cache-e8caf244-413b-49bb-bdff-79aca0ccbc2b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.829937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "refresh_cache-e8caf244-413b-49bb-bdff-79aca0ccbc2b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.830443] env[69994]: DEBUG nova.network.neutron [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.849393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 
1.009s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.860205] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241553, 'name': PowerOffVM_Task, 'duration_secs': 0.207216} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.860328] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.861545] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1613bb06-27a2-419d-b956-c9e6bcc495b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.880870] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fc6374-f988-44a6-9798-3474fad44323 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.910533] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 734.910851] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b062e5e5-ced5-4b3e-ab02-9e7b904cc0f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.917423] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 734.917423] env[69994]: value = "task-3241554" [ 734.917423] env[69994]: _type = "Task" [ 734.917423] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.925561] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241554, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.011454] env[69994]: INFO nova.compute.manager [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Took 51.13 seconds to build instance. 
[ 735.064228] env[69994]: DEBUG nova.compute.manager [req-ac1adaf3-7d57-4006-b922-ba02d1f2ca0c req-1460733f-5f46-4d67-838a-94b0c67b92e6 service nova] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Received event network-vif-deleted-5849490a-d0e4-4330-b929-ead7f7ce0fd7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.133813] env[69994]: DEBUG nova.scheduler.client.report [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 735.152324] env[69994]: DEBUG nova.compute.manager [req-dacec0e5-f0af-469c-b03f-13b9e2d1e6b6 req-dff9dafc-7828-4b7c-a138-f918ed16c39b service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Received event network-vif-plugged-601a8fe0-5f6c-4f29-860b-193a32a1e99c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.152481] env[69994]: DEBUG oslo_concurrency.lockutils [req-dacec0e5-f0af-469c-b03f-13b9e2d1e6b6 req-dff9dafc-7828-4b7c-a138-f918ed16c39b service nova] Acquiring lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.152970] env[69994]: DEBUG oslo_concurrency.lockutils [req-dacec0e5-f0af-469c-b03f-13b9e2d1e6b6 req-dff9dafc-7828-4b7c-a138-f918ed16c39b service nova] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.152970] env[69994]: DEBUG oslo_concurrency.lockutils [req-dacec0e5-f0af-469c-b03f-13b9e2d1e6b6 req-dff9dafc-7828-4b7c-a138-f918ed16c39b service nova] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.153145] env[69994]: DEBUG nova.compute.manager [req-dacec0e5-f0af-469c-b03f-13b9e2d1e6b6 req-dff9dafc-7828-4b7c-a138-f918ed16c39b service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] No waiting events found dispatching network-vif-plugged-601a8fe0-5f6c-4f29-860b-193a32a1e99c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 735.153340] env[69994]: WARNING nova.compute.manager [req-dacec0e5-f0af-469c-b03f-13b9e2d1e6b6 req-dff9dafc-7828-4b7c-a138-f918ed16c39b service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Received unexpected event network-vif-plugged-601a8fe0-5f6c-4f29-860b-193a32a1e99c for instance with vm_state building and task_state spawning. 
[ 735.344816] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "2d812174-d2ad-4fac-8ae5-ffa51d691374" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.345594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.393131] env[69994]: DEBUG nova.network.neutron [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.428207] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 735.428428] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.428647] env[69994]: DEBUG oslo_concurrency.lockutils [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.428805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.428974] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.429848] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f2430cfe-9e3c-42f6-8f65-f2f9e8711b90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.440855] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.440855] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 735.440855] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6262eed-dd2b-4625-911c-c167cae74099 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.449256] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 735.449256] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fd4c90-b794-693b-2342-5ee41cc8773c" [ 735.449256] env[69994]: _type = "Task" [ 735.449256] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.467524] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fd4c90-b794-693b-2342-5ee41cc8773c, 'name': SearchDatastore_Task, 'duration_secs': 0.014012} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.468588] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17ea1cbf-dc5d-4c6f-99ca-12198957222b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.475014] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 735.475014] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e3f696-492e-49fc-9f54-ea387c3efa6e" [ 735.475014] env[69994]: _type = "Task" [ 735.475014] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.485858] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e3f696-492e-49fc-9f54-ea387c3efa6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.514350] env[69994]: DEBUG oslo_concurrency.lockutils [None req-458665e4-d101-4a71-9e5e-f2221d07fc05 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.044s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.600830] env[69994]: DEBUG nova.network.neutron [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Updating instance_info_cache with network_info: [{"id": "601a8fe0-5f6c-4f29-860b-193a32a1e99c", "address": "fa:16:3e:2e:9e:a2", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap601a8fe0-5f", "ovs_interfaceid": "601a8fe0-5f6c-4f29-860b-193a32a1e99c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.630036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.630036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.630036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
735.630036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.630408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.632339] env[69994]: INFO nova.compute.manager [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Terminating instance [ 735.639720] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.640282] env[69994]: DEBUG nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 735.643267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.852s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.644611] env[69994]: INFO nova.compute.claims [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.963436] env[69994]: DEBUG oslo_concurrency.lockutils [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Acquiring lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.963564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.963749] env[69994]: INFO nova.compute.manager [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Attaching volume 50fa937b-e572-4847-88c1-bfd627eacc5e to /dev/sdb [ 735.986385] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e3f696-492e-49fc-9f54-ea387c3efa6e, 'name': SearchDatastore_Task, 'duration_secs': 0.014559} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.986641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.986899] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. 
{{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 735.988070] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb1d0e49-0266-4be0-81ab-787d6e11f746 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.996368] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 735.996368] env[69994]: value = "task-3241555" [ 735.996368] env[69994]: _type = "Task" [ 735.996368] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.000559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8627a404-9acd-46f5-b25b-e729f3790785 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.013496] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241555, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.014243] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdab7c68-90d6-40d2-8ae2-51dbc815dd9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.019392] env[69994]: DEBUG nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 736.029076] env[69994]: DEBUG nova.virt.block_device [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updating existing volume attachment record: 241405c3-cd71-4e6f-a69f-5deebdeb777b {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 736.105111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "refresh_cache-e8caf244-413b-49bb-bdff-79aca0ccbc2b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.105568] env[69994]: DEBUG nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Instance network_info: |[{"id": "601a8fe0-5f6c-4f29-860b-193a32a1e99c", "address": "fa:16:3e:2e:9e:a2", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap601a8fe0-5f", "ovs_interfaceid": "601a8fe0-5f6c-4f29-860b-193a32a1e99c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 736.105943] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:9e:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c66a277b-e3bf-43b8-a632-04fdd0720b91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '601a8fe0-5f6c-4f29-860b-193a32a1e99c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.115596] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 736.115862] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 736.117253] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1821b807-f537-454a-a964-28e54970af61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.137897] env[69994]: DEBUG nova.compute.manager [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 736.138182] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 736.140237] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23567b6-d3a3-47f4-ac13-726614769386 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.144610] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.144610] env[69994]: value = "task-3241556" [ 736.144610] env[69994]: _type = "Task" [ 736.144610] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.149582] env[69994]: DEBUG nova.compute.utils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 736.156174] env[69994]: DEBUG nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 736.156396] env[69994]: DEBUG nova.network.neutron [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 736.158683] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 736.159175] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-546b73a0-78d7-41df-87de-add2b195653e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.165415] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241556, 'name': CreateVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.171634] env[69994]: DEBUG oslo_vmware.api [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 736.171634] env[69994]: value = "task-3241557" [ 736.171634] env[69994]: _type = "Task" [ 736.171634] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.181217] env[69994]: DEBUG oslo_vmware.api [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241557, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.207205] env[69994]: DEBUG nova.policy [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47b53da395744b6784ca0907a79b1875', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91697b639b15438297eec6880a72f444', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 736.508552] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241555, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.542242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.657618] env[69994]: DEBUG nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.660657] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241556, 'name': CreateVM_Task, 'duration_secs': 0.384357} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.663376] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 736.664642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.664642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.664845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 736.665096] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2377213c-793c-4899-8358-a3e3cdb608b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.672822] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 736.672822] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520ff660-c192-ec41-1c8c-05f3ac8386c5" [ 736.672822] env[69994]: _type = "Task" [ 736.672822] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.693131] env[69994]: DEBUG oslo_vmware.api [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241557, 'name': PowerOffVM_Task, 'duration_secs': 0.231711} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.693131] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520ff660-c192-ec41-1c8c-05f3ac8386c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.697434] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 736.697779] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 736.698465] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-675ea4a9-daea-41f1-9929-02147719e733 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.738510] env[69994]: DEBUG nova.network.neutron [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Successfully created port: 46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.780180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 736.780399] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 736.780587] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleting the datastore file [datastore1] 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 736.780850] env[69994]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a59048e-c2ce-4acc-b6e7-a6484e865afd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.789283] env[69994]: DEBUG oslo_vmware.api [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for the task: (returnval){ [ 736.789283] env[69994]: value = "task-3241563" [ 736.789283] env[69994]: _type = "Task" [ 736.789283] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.798476] env[69994]: DEBUG oslo_vmware.api [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.011329] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241555, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.692728} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.011652] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. [ 737.012549] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d12b09-3ae5-4be9-bf1c-63865e840a2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.045563] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.048864] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f0f1041-6c6b-420e-be6d-1c833e9a4415 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.071183] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 737.071183] env[69994]: value = "task-3241564" [ 737.071183] env[69994]: _type = "Task" [ 737.071183] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.088624] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241564, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.190710] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520ff660-c192-ec41-1c8c-05f3ac8386c5, 'name': SearchDatastore_Task, 'duration_secs': 0.015782} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.191148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.191554] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.191835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.192033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.192250] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.192542] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0efc0de9-4595-479e-9c6b-c9b95aeb73c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.204829] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.204829] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.208088] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef62f534-66b9-46a8-b698-007b1addaa34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.215979] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 737.215979] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529dc463-8613-635f-f0c3-42f4aac8c2a9" [ 737.215979] env[69994]: _type = "Task" [ 737.215979] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.231460] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529dc463-8613-635f-f0c3-42f4aac8c2a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.307429] env[69994]: DEBUG oslo_vmware.api [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Task: {'id': task-3241563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209331} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.307761] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 737.308070] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 737.308533] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 737.308741] env[69994]: INFO nova.compute.manager [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 737.308892] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.313297] env[69994]: DEBUG nova.compute.manager [-] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 737.313444] env[69994]: DEBUG nova.network.neutron [-] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.354383] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2a025c-4c77-4673-b0aa-be0de83c93ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.365512] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8c7170-5c8d-4928-805a-6d71c2bcfb71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.404803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6982dedf-2595-4c5d-b507-939d0ac28ea9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.408858] env[69994]: DEBUG nova.compute.manager [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Received event network-changed-601a8fe0-5f6c-4f29-860b-193a32a1e99c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.409091] env[69994]: DEBUG nova.compute.manager [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Refreshing instance network info cache due to event network-changed-601a8fe0-5f6c-4f29-860b-193a32a1e99c. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 737.409312] env[69994]: DEBUG oslo_concurrency.lockutils [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] Acquiring lock "refresh_cache-e8caf244-413b-49bb-bdff-79aca0ccbc2b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.409488] env[69994]: DEBUG oslo_concurrency.lockutils [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] Acquired lock "refresh_cache-e8caf244-413b-49bb-bdff-79aca0ccbc2b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.409681] env[69994]: DEBUG nova.network.neutron [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Refreshing network info cache for port 601a8fe0-5f6c-4f29-860b-193a32a1e99c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.417612] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbd9ad9-ef76-43eb-9cb1-7e8b99d44b29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.436309] env[69994]: DEBUG nova.compute.provider_tree [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 737.583312] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241564, 'name': ReconfigVM_Task, 'duration_secs': 0.39093} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.584136] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Reconfigured VM instance instance-00000020 to attach disk [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.584719] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91842b4a-09eb-42fc-92c0-811890ff92c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.611622] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2bbd856-b41c-490b-9ccd-58fb2491c865 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.639056] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 737.639056] env[69994]: value = "task-3241565" [ 737.639056] env[69994]: _type = "Task" [ 737.639056] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.646670] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241565, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.670436] env[69994]: DEBUG nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.708140] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.708140] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.708140] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.708433] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.708597] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.708919] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.709321] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.709663] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.709989] env[69994]: DEBUG nova.virt.hardware [None 
req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.710595] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.711053] env[69994]: DEBUG nova.virt.hardware [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.712940] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2084ed55-3f65-4b0b-96ee-3ee8ae7f4b7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.731609] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be78b40-5a51-48e2-b56d-58d34e9b7665 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.736502] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529dc463-8613-635f-f0c3-42f4aac8c2a9, 'name': SearchDatastore_Task, 'duration_secs': 0.01734} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.738049] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-446ae1cc-a906-4a93-aae0-8f40cb839041 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.752772] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 737.752772] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5295cda0-7afd-0d69-6729-99bcfe5be4db" [ 737.752772] env[69994]: _type = "Task" [ 737.752772] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.763254] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5295cda0-7afd-0d69-6729-99bcfe5be4db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.960599] env[69994]: ERROR nova.scheduler.client.report [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [req-415a26c3-cc80-445b-b3e0-3119889bcb87] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-415a26c3-cc80-445b-b3e0-3119889bcb87"}]} [ 737.980428] env[69994]: DEBUG nova.scheduler.client.report [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 737.995249] env[69994]: DEBUG nova.scheduler.client.report [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 737.995489] env[69994]: DEBUG nova.compute.provider_tree [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 738.008181] env[69994]: DEBUG nova.scheduler.client.report [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 738.027431] env[69994]: DEBUG nova.scheduler.client.report [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 
tempest-ServersTestFqdnHostnames-1986607463-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 738.150209] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241565, 'name': ReconfigVM_Task, 'duration_secs': 0.188496} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.151346] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.151346] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a726902-bfc8-45a3-b3a5-658efd49db70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.159094] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 738.159094] env[69994]: value = "task-3241566" [ 738.159094] env[69994]: _type = "Task" [ 738.159094] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.169843] env[69994]: DEBUG nova.network.neutron [-] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.171159] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241566, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.185740] env[69994]: DEBUG nova.network.neutron [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Updated VIF entry in instance network info cache for port 601a8fe0-5f6c-4f29-860b-193a32a1e99c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 738.186704] env[69994]: DEBUG nova.network.neutron [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Updating instance_info_cache with network_info: [{"id": "601a8fe0-5f6c-4f29-860b-193a32a1e99c", "address": "fa:16:3e:2e:9e:a2", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap601a8fe0-5f", "ovs_interfaceid": "601a8fe0-5f6c-4f29-860b-193a32a1e99c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.267341] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5295cda0-7afd-0d69-6729-99bcfe5be4db, 'name': SearchDatastore_Task, 'duration_secs': 0.018106} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.272259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.272790] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e8caf244-413b-49bb-bdff-79aca0ccbc2b/e8caf244-413b-49bb-bdff-79aca0ccbc2b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.273379] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce3f0064-e38a-47bd-a268-b81b855744b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.283692] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 738.283692] env[69994]: value = "task-3241567" [ 738.283692] env[69994]: _type = "Task" [ 738.283692] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.298210] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241567, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.670812] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241566, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.674188] env[69994]: INFO nova.compute.manager [-] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Took 1.36 seconds to deallocate network for instance. 
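The "Failed to update inventory ... Got 409 ... placement.concurrent_update" error a little earlier in this trace is Placement rejecting an inventory PUT whose resource_provider_generation is stale; the report client then refreshes the provider's inventories, aggregates and traits (the "Refreshing ..." entries) and retries, which is why a later entry records the provider generation moving from 64 to 65. A hedged sketch of the request shape only; the endpoint, token and microversion header value are assumptions, while the provider UUID is the one from this trace.

    import requests

    PLACEMENT = 'http://placement.example.test'       # placeholder endpoint
    TOKEN = 'gAAAA...'                                 # placeholder Keystone token
    RP_UUID = '92ce3c95-4efe-4d04-802b-6b187afc5aa7'   # provider UUID seen in the log

    def put_inventories(generation, inventories):
        # PUT the full inventory for one resource provider. If `generation` no
        # longer matches the provider's current generation, Placement answers
        # HTTP 409 with error code "placement.concurrent_update", as above.
        return requests.put(
            f'{PLACEMENT}/resource_providers/{RP_UUID}/inventories',
            headers={'X-Auth-Token': TOKEN,
                     'OpenStack-API-Version': 'placement 1.26'},  # assumed microversion
            json={'resource_provider_generation': generation,
                  'inventories': inventories})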
[ 738.689535] env[69994]: DEBUG oslo_concurrency.lockutils [req-5de18892-9e68-4996-a15b-48069325a11e req-cbba06df-d5ec-4581-ab44-58aae2fa1cb7 service nova] Releasing lock "refresh_cache-e8caf244-413b-49bb-bdff-79aca0ccbc2b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.693277] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392f04d0-d9bf-4ab1-8bc8-e75bcc363a26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.707042] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da87df0-0c8b-4a5b-b49b-6341a88fd887 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.748083] env[69994]: DEBUG nova.network.neutron [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Successfully updated port: 46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.749841] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27c10df-99c6-4613-bf0e-a709619a45c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.761109] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73d9d1a-a2e0-46d5-bb45-441d4b0aecc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.779722] env[69994]: DEBUG nova.compute.provider_tree [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 738.797530] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241567, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.169899] env[69994]: DEBUG oslo_vmware.api [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241566, 'name': PowerOnVM_Task, 'duration_secs': 0.765652} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.170210] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.173118] env[69994]: DEBUG nova.compute.manager [None req-671ff5f0-ea27-466c-b639-d705d2cd37e3 tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.173808] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb5b675-c640-4994-9009-a829a0e8542e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.185440] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.254290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.254456] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquired lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.254618] env[69994]: DEBUG nova.network.neutron [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.300279] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661967} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.300697] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e8caf244-413b-49bb-bdff-79aca0ccbc2b/e8caf244-413b-49bb-bdff-79aca0ccbc2b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.300981] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.301297] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44479b29-e64b-427a-ba25-c8d2bb45698a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.309746] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 739.309746] env[69994]: value = "task-3241569" [ 739.309746] env[69994]: _type = "Task" [ 739.309746] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.318957] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241569, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.320547] env[69994]: DEBUG nova.scheduler.client.report [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 739.320833] env[69994]: DEBUG nova.compute.provider_tree [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 64 to 65 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 739.320951] env[69994]: DEBUG nova.compute.provider_tree [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 739.446281] env[69994]: DEBUG nova.compute.manager [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Received event network-vif-deleted-b1e9d712-4ee9-4431-b7ed-f8221fb33b62 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.446510] env[69994]: DEBUG nova.compute.manager [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Received event network-vif-plugged-46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.446715] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] Acquiring lock "744fe018-d12c-44c2-98f1-c11fbfffc98e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.446942] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.447131] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.447337] env[69994]: DEBUG nova.compute.manager [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] No waiting events found dispatching network-vif-plugged-46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 739.447547] env[69994]: WARNING nova.compute.manager [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Received unexpected event network-vif-plugged-46e255ba-a2d4-4bd1-942b-f18624bd0198 for instance with vm_state building and task_state spawning. [ 739.447766] env[69994]: DEBUG nova.compute.manager [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Received event network-changed-46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.447927] env[69994]: DEBUG nova.compute.manager [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Refreshing instance network info cache due to event network-changed-46e255ba-a2d4-4bd1-942b-f18624bd0198. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 739.448153] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] Acquiring lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.726597] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293b9ce-a056-9512-3415-07d462ecd9e0/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 739.727871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f733a39-181b-4b34-9899-b11a2feeda69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.736803] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293b9ce-a056-9512-3415-07d462ecd9e0/disk-0.vmdk is in state: ready. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 739.737133] env[69994]: ERROR oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293b9ce-a056-9512-3415-07d462ecd9e0/disk-0.vmdk due to incomplete transfer. [ 739.737360] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f1fbc965-912c-44b3-aa90-4ca40f61d0fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.747322] env[69994]: DEBUG oslo_vmware.rw_handles [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293b9ce-a056-9512-3415-07d462ecd9e0/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 739.747541] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Uploaded image 148b7a39-161a-4c6d-a856-8b0f6909f0dd to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 739.749772] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 739.750401] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-baee8cf0-8178-45b4-b994-397bbde16166 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.758819] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 739.758819] env[69994]: value = "task-3241570" [ 739.758819] env[69994]: _type = "Task" [ 739.758819] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.768562] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241570, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.798694] env[69994]: DEBUG nova.network.neutron [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.820618] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241569, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079261} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.820809] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.821683] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c6e587-6b8d-4d75-89c8-e34ec08bb947 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.825362] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.182s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.825863] env[69994]: DEBUG nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 739.840031] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.266s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.840031] env[69994]: DEBUG nova.objects.instance [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lazy-loading 'resources' on Instance uuid 6e8286a3-6fd1-44ee-a5ca-b21f3178334d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 739.847735] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] e8caf244-413b-49bb-bdff-79aca0ccbc2b/e8caf244-413b-49bb-bdff-79aca0ccbc2b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.848669] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2777c9c9-abea-4ba0-9cb9-e92a521bee8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.873299] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 739.873299] env[69994]: value = "task-3241571" [ 739.873299] env[69994]: _type = "Task" [ 739.873299] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.885544] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241571, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.025442] env[69994]: DEBUG nova.network.neutron [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Updating instance_info_cache with network_info: [{"id": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "address": "fa:16:3e:e0:78:98", "network": {"id": "6b605f54-2fa6-407e-b3f4-2ce9853b449e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1428465928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91697b639b15438297eec6880a72f444", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e255ba-a2", "ovs_interfaceid": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.270052] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241570, 'name': Destroy_Task, 'duration_secs': 0.371634} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.270285] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Destroyed the VM [ 740.270527] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 740.270776] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bf11fbde-c0f9-4b4f-9556-fa2264357a32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.277876] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 740.277876] env[69994]: value = "task-3241572" [ 740.277876] env[69994]: _type = "Task" [ 740.277876] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.286330] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241572, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.354040] env[69994]: DEBUG nova.compute.utils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 740.355055] env[69994]: DEBUG nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 740.357285] env[69994]: DEBUG nova.network.neutron [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 740.387795] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241571, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.417958] env[69994]: DEBUG nova.policy [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'edec5775f4754417afc4eab791d394dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '114596d74d9b40248f385df6e4644aaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 740.531020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Releasing lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.531020] env[69994]: DEBUG nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Instance network_info: |[{"id": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "address": "fa:16:3e:e0:78:98", "network": {"id": "6b605f54-2fa6-407e-b3f4-2ce9853b449e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1428465928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91697b639b15438297eec6880a72f444", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e255ba-a2", "ovs_interfaceid": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 740.531238] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] Acquired lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.531238] env[69994]: DEBUG nova.network.neutron [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Refreshing network info cache for port 46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 740.531238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d 
tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:78:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46e255ba-a2d4-4bd1-942b-f18624bd0198', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.537907] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Creating folder: Project (91697b639b15438297eec6880a72f444). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.543477] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c28cd00-0f76-4d0a-a1b4-d42b14262f03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.563521] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Created folder: Project (91697b639b15438297eec6880a72f444) in parent group-v647729. [ 740.563521] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Creating folder: Instances. Parent ref: group-v647836. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.563521] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e430e74-32ea-489e-a457-9f8769a5bb51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.574909] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Created folder: Instances in parent group-v647836. [ 740.575367] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.575686] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.576060] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f056923d-d4fa-4c91-bce7-a24398c2c3fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.608284] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.608284] env[69994]: value = "task-3241575" [ 740.608284] env[69994]: _type = "Task" [ 740.608284] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.617965] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241575, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.793134] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241572, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.858317] env[69994]: DEBUG nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 740.887898] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241571, 'name': ReconfigVM_Task, 'duration_secs': 0.892363} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.888338] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Reconfigured VM instance instance-00000022 to attach disk [datastore1] e8caf244-413b-49bb-bdff-79aca0ccbc2b/e8caf244-413b-49bb-bdff-79aca0ccbc2b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.889503] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3548676-dab0-4b61-948c-f24377293d9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.902368] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 740.902368] env[69994]: value = "task-3241576" [ 740.902368] env[69994]: _type = "Task" [ 740.902368] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.917407] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241576, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.075735] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75081f67-f0b6-4a57-87ec-0f8a8735056c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.085759] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd51c1c-1292-4ee4-bae2-3fe9293bcbb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.093831] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 741.094028] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647835', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'name': 'volume-50fa937b-e572-4847-88c1-bfd627eacc5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '53a8714c-50f7-4990-a3d9-86f8fc908d03', 'attached_at': '', 'detached_at': '', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'serial': '50fa937b-e572-4847-88c1-bfd627eacc5e'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 741.094819] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef6b343-e3fb-4e47-a2c7-df996fcf8f6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.147116] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe1de6e-4afd-4c9a-ba7b-8e7a372e61d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.151124] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee130b1-335a-43e7-bbaf-fcbd94e89e22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.176809] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c000e815-716e-4ed4-a3e8-8bc7c4e3618e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.188377] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] volume-50fa937b-e572-4847-88c1-bfd627eacc5e/volume-50fa937b-e572-4847-88c1-bfd627eacc5e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} 
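The oslo_vmware.service / oslo_vmware.api entries above (Folder.CreateFolder, CreateVM_Task, ReconfigVM_Task, the repeated "progress is N%" polls) all go through the same oslo.vmware session pattern: invoke_api() issues the SOAP call, task-producing methods hand back a Task moref, and wait_for_task() polls it until it succeeds or fails. A minimal sketch of that pattern follows; the host, credentials, and keyword argument names here are illustrative assumptions recalled from memory, not values taken from this deployment.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint/credentials; Nova reads these from the [vmware]
    # section of nova.conf. Keyword names may vary slightly across
    # oslo.vmware releases.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # A plain property read -- the same kind of call behind the many
    # "Invoking PropertyCollector.RetrievePropertiesEx" entries: list up to
    # 100 VirtualMachine objects together with their 'name' property.
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'VirtualMachine', 100, ['name'])
    for obj_content in retrieve_result.objects:
        print(obj_content.obj, obj_content.propSet[0].val)

    # Task-producing methods (CreateVM_Task, ReconfigVM_Task,
    # CopyVirtualDisk_Task, PowerOnVM_Task in the log) return a Task moref
    # instead of a result; the caller then blocks on it, which is what
    # produces the "progress is N%" polling lines:
    #     task_info = session.wait_for_task(task_ref)
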
[ 741.188649] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241575, 'name': CreateVM_Task, 'duration_secs': 0.529751} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.190225] env[69994]: DEBUG nova.network.neutron [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Updated VIF entry in instance network info cache for port 46e255ba-a2d4-4bd1-942b-f18624bd0198. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 741.190225] env[69994]: DEBUG nova.network.neutron [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Updating instance_info_cache with network_info: [{"id": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "address": "fa:16:3e:e0:78:98", "network": {"id": "6b605f54-2fa6-407e-b3f4-2ce9853b449e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1428465928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91697b639b15438297eec6880a72f444", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e255ba-a2", "ovs_interfaceid": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.191099] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d08da6d-9b86-4d6a-8479-b4b695a7643a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.203976] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.205407] env[69994]: DEBUG oslo_concurrency.lockutils [req-1e7b0daf-e676-4a20-bf3c-ca9c4557bb9e req-0ed2dee1-8151-481a-84f9-cf2c9798de05 service nova] Releasing lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.206587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.206757] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 
tempest-ServersTestManualDisk-1627606246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.207081] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.207905] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1b14a07-bc2a-48e4-b97f-ac9ce59bcfc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.218241] env[69994]: DEBUG nova.compute.provider_tree [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.222781] env[69994]: DEBUG oslo_vmware.api [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Waiting for the task: (returnval){ [ 741.222781] env[69994]: value = "task-3241577" [ 741.222781] env[69994]: _type = "Task" [ 741.222781] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.226713] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 741.226713] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bc0c3b-0178-f62f-d38d-2c2c756d6b7f" [ 741.226713] env[69994]: _type = "Task" [ 741.226713] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.236155] env[69994]: DEBUG oslo_vmware.api [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241577, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.247551] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bc0c3b-0178-f62f-d38d-2c2c756d6b7f, 'name': SearchDatastore_Task, 'duration_secs': 0.011969} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.247551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.247551] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.247551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.247770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.247770] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.247770] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e3c511f-99cb-4c35-a4b2-fba9774c3538 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.258456] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.258646] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.259434] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-578aa7ea-34d8-4629-8dde-fa50d55569f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.265443] env[69994]: DEBUG nova.network.neutron [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Successfully created port: 50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.269229] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 741.269229] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ac4356-5ab6-69f5-cc3f-bee8f8288739" [ 741.269229] env[69994]: _type = "Task" [ 741.269229] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.279645] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac4356-5ab6-69f5-cc3f-bee8f8288739, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.291436] env[69994]: DEBUG oslo_vmware.api [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241572, 'name': RemoveSnapshot_Task, 'duration_secs': 0.594477} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.291776] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 741.292015] env[69994]: INFO nova.compute.manager [None req-636ad1b7-920a-4963-be93-b8f67ff00db8 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Took 13.88 seconds to snapshot the instance on the hypervisor. [ 741.411800] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241576, 'name': Rename_Task, 'duration_secs': 0.180014} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.411800] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.412019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-119381d0-3afc-4cde-a363-db7d0c78eb7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.420276] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 741.420276] env[69994]: value = "task-3241578" [ 741.420276] env[69994]: _type = "Task" [ 741.420276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.429303] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241578, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.751843] env[69994]: DEBUG oslo_vmware.api [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241577, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.767192] env[69994]: DEBUG nova.scheduler.client.report [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 65 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 741.767488] env[69994]: DEBUG nova.compute.provider_tree [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 65 to 66 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 741.767673] env[69994]: DEBUG nova.compute.provider_tree [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.781787] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac4356-5ab6-69f5-cc3f-bee8f8288739, 'name': SearchDatastore_Task, 'duration_secs': 0.025711} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.787284] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c60c076f-bedb-4d8e-b308-8020cce06960 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.797789] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 741.797789] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e5a22a-4e09-1ae0-646d-8f7ce467456e" [ 741.797789] env[69994]: _type = "Task" [ 741.797789] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.809181] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e5a22a-4e09-1ae0-646d-8f7ce467456e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.868968] env[69994]: DEBUG nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 741.900498] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 741.900747] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.900905] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 741.901101] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.901251] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 741.901468] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 741.901699] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 741.901879] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 741.902062] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 741.902230] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 741.902400] env[69994]: DEBUG nova.virt.hardware [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 741.903293] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca98ea1-9b73-457a-b29c-6d1a386aa484 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.911889] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0eaec78-8a7d-4b2a-891e-333af7f9a0ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.937086] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241578, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.131229] env[69994]: DEBUG nova.compute.manager [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 742.131229] env[69994]: DEBUG nova.compute.manager [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing instance network info cache due to event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 742.131229] env[69994]: DEBUG oslo_concurrency.lockutils [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] Acquiring lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.131229] env[69994]: DEBUG oslo_concurrency.lockutils [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] Acquired lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.131229] env[69994]: DEBUG nova.network.neutron [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.235181] env[69994]: DEBUG oslo_vmware.api [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241577, 'name': ReconfigVM_Task, 'duration_secs': 0.770339} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.235416] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Reconfigured VM instance instance-0000000a to attach disk [datastore1] volume-50fa937b-e572-4847-88c1-bfd627eacc5e/volume-50fa937b-e572-4847-88c1-bfd627eacc5e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.240506] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90da852a-8074-435f-98dc-8da574421729 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.256710] env[69994]: DEBUG oslo_vmware.api [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Waiting for the task: (returnval){ [ 742.256710] env[69994]: value = "task-3241579" [ 742.256710] env[69994]: _type = "Task" [ 742.256710] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.266420] env[69994]: DEBUG oslo_vmware.api [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241579, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.275844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.438s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.278541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.581s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.280745] env[69994]: INFO nova.compute.claims [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.319678] env[69994]: INFO nova.scheduler.client.report [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Deleted allocations for instance 6e8286a3-6fd1-44ee-a5ca-b21f3178334d [ 742.332886] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e5a22a-4e09-1ae0-646d-8f7ce467456e, 'name': SearchDatastore_Task, 'duration_secs': 0.014233} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.333798] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.334270] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 744fe018-d12c-44c2-98f1-c11fbfffc98e/744fe018-d12c-44c2-98f1-c11fbfffc98e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 742.334739] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb4526be-d561-4493-8b46-fb61103f12d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.346585] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 742.346585] env[69994]: value = "task-3241580" [ 742.346585] env[69994]: _type = "Task" [ 742.346585] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.356506] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.440375] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241578, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.770122] env[69994]: DEBUG oslo_vmware.api [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241579, 'name': ReconfigVM_Task, 'duration_secs': 0.187979} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.770879] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647835', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'name': 'volume-50fa937b-e572-4847-88c1-bfd627eacc5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '53a8714c-50f7-4990-a3d9-86f8fc908d03', 'attached_at': '', 'detached_at': '', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'serial': '50fa937b-e572-4847-88c1-bfd627eacc5e'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 742.837354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-03042079-431b-4859-bd74-c9b0335c9106 tempest-DeleteServersAdminTestJSON-626869198 tempest-DeleteServersAdminTestJSON-626869198-project-member] Lock "6e8286a3-6fd1-44ee-a5ca-b21f3178334d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.071s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.860204] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241580, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.906938] env[69994]: DEBUG nova.network.neutron [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updated VIF entry in instance network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 742.906938] env[69994]: DEBUG nova.network.neutron [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.941178] env[69994]: DEBUG oslo_vmware.api [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241578, 'name': PowerOnVM_Task, 'duration_secs': 1.043584} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.944484] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.944484] env[69994]: INFO nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Took 10.38 seconds to spawn the instance on the hypervisor. [ 742.944484] env[69994]: DEBUG nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.944484] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a93774-69c9-4ed3-bafb-98f35afbcda5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.359962] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612539} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.362370] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 744fe018-d12c-44c2-98f1-c11fbfffc98e/744fe018-d12c-44c2-98f1-c11fbfffc98e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 743.362586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.362996] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b967304e-0057-4390-b9f4-2762f0aae490 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.370240] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 743.370240] env[69994]: value = "task-3241581" [ 743.370240] env[69994]: _type = "Task" [ 743.370240] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.382504] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.409358] env[69994]: DEBUG oslo_concurrency.lockutils [req-5cfb6ad6-0e7e-4cfa-8049-4a4bfe98f6bb req-25b5f757-0f1f-4a27-900d-e88d60dffe3c service nova] Releasing lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.464588] env[69994]: INFO nova.compute.manager [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Took 54.32 seconds to build instance. 
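The provider-tree entries above (generation 65 -> 66 for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7) reflect Placement's optimistic-concurrency scheme: the writer echoes back the resource_provider_generation it last read, and the write is rejected with a conflict if another writer bumped the generation first. A rough sketch of the same inventory write against the Placement HTTP API, reusing the totals from the log; the endpoint, token handling, and microversion header are illustrative assumptions, not values from this environment.

    import os
    import requests

    # Illustrative endpoint and auth; a real client discovers the placement
    # endpoint from the Keystone service catalog.
    PLACEMENT = os.environ.get('PLACEMENT_URL',
                               'http://placement.example.test/placement')
    HEADERS = {
        'X-Auth-Token': os.environ.get('OS_TOKEN', 'changeme'),
        'OpenStack-API-Version': 'placement 1.26',
    }

    rp_uuid = '92ce3c95-4efe-4d04-802b-6b187afc5aa7'

    # Read the current inventory; the body carries resource_provider_generation.
    current = requests.get(
        f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
        headers=HEADERS).json()

    payload = {
        # Echo the generation back; Placement returns 409 Conflict if another
        # writer bumped it in the meantime (the 65 -> 66 step seen in the log).
        'resource_provider_generation': current['resource_provider_generation'],
        'inventories': {
            'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                     'step_size': 1, 'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                          'max_unit': 65530, 'step_size': 1,
                          'allocation_ratio': 1.0},
            'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                        'max_unit': 119, 'step_size': 1,
                        'allocation_ratio': 1.0},
        },
    }
    resp = requests.put(
        f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
        json=payload, headers=HEADERS)
    resp.raise_for_status()
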
[ 743.474668] env[69994]: DEBUG nova.network.neutron [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Successfully updated port: 50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.835456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d95579-4345-4e36-9856-04ce7920f4e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.840712] env[69994]: DEBUG nova.objects.instance [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lazy-loading 'flavor' on Instance uuid 53a8714c-50f7-4990-a3d9-86f8fc908d03 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 743.845540] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430eda76-adb7-451a-8b14-bbc4de4076b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.883370] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44143a66-dfee-44dc-80bb-6524ac583d8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.892427] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070566} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.894517] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.895600] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f155bdf7-3bf3-43d8-9ea3-e617fb674229 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.899087] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3610adc9-6095-45a1-b3a2-4e50bdc50690 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.915797] env[69994]: DEBUG nova.compute.provider_tree [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.935441] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 744fe018-d12c-44c2-98f1-c11fbfffc98e/744fe018-d12c-44c2-98f1-c11fbfffc98e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 743.936538] env[69994]: DEBUG nova.scheduler.client.report [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.939888] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c0cc0b2-3028-457d-a7ed-8de03c619ba4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.962023] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 743.962023] env[69994]: value = "task-3241582" [ 743.962023] env[69994]: _type = "Task" [ 743.962023] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.970812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1eb8a80-c351-4f7d-b4cd-c2d6286f8e54 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.156s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.972597] env[69994]: DEBUG nova.compute.manager [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.972909] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241582, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.974510] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e738ba12-4f10-43a6-9052-b4fb3dbba509 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.978280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.978420] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquired lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.978564] env[69994]: DEBUG nova.network.neutron [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.352464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-084ba117-8c3c-4aee-a0d2-9de0cdc9b9b7 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.389s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.456612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.178s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.457207] env[69994]: DEBUG nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 744.459757] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.353s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.461164] env[69994]: INFO nova.compute.claims [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.473335] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241582, 'name': ReconfigVM_Task, 'duration_secs': 0.283099} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.473587] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 744fe018-d12c-44c2-98f1-c11fbfffc98e/744fe018-d12c-44c2-98f1-c11fbfffc98e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 744.474210] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ed4ee20-df73-480b-9f9a-203c65dcadf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.478581] env[69994]: DEBUG nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.488254] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 744.488254] env[69994]: value = "task-3241583" [ 744.488254] env[69994]: _type = "Task" [ 744.488254] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.489687] env[69994]: INFO nova.compute.manager [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] instance snapshotting [ 744.495281] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61c1a48-c4b9-4623-85e7-4dd3bf66d4f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.503746] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241583, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.520729] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4322bf5c-4722-436a-8a6d-fbe515ed9b16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.556682] env[69994]: DEBUG nova.network.neutron [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.569382] env[69994]: DEBUG nova.compute.manager [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 744.569621] env[69994]: DEBUG nova.compute.manager [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing instance network info cache due to event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 744.569780] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Acquiring lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.569921] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Acquired lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.570354] env[69994]: DEBUG nova.network.neutron [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 744.859602] env[69994]: DEBUG nova.network.neutron [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Updating instance_info_cache with network_info: [{"id": "50763b02-561b-4c13-8a91-c7e639f09715", "address": "fa:16:3e:e2:66:27", "network": {"id": "3dc045d7-eb28-46c5-be05-cdcca879d533", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-956262977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "114596d74d9b40248f385df6e4644aaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50763b02-56", "ovs_interfaceid": "50763b02-561b-4c13-8a91-c7e639f09715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.965392] env[69994]: DEBUG nova.compute.utils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.968817] env[69994]: DEBUG nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.968990] env[69994]: DEBUG nova.network.neutron [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.003845] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241583, 'name': Rename_Task, 'duration_secs': 0.170369} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.004151] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 745.004435] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2aff6d9-d54d-4db0-bb67-fd18cb352319 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.013054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.015135] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 745.015135] env[69994]: value = "task-3241584" [ 745.015135] env[69994]: _type = "Task" [ 745.015135] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.024584] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241584, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.034831] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 745.035144] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c448d585-ee54-454b-a580-c10ca25f7326 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.041787] env[69994]: DEBUG nova.policy [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '958a32e69e9c4066bfdae5ccc5b5609d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7668e2fbee0a4948a0cbdbd964e764e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.045642] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 745.045642] env[69994]: value = "task-3241585" [ 745.045642] env[69994]: _type = "Task" [ 745.045642] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.057579] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241585, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.368446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Releasing lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.368446] env[69994]: DEBUG nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Instance network_info: |[{"id": "50763b02-561b-4c13-8a91-c7e639f09715", "address": "fa:16:3e:e2:66:27", "network": {"id": "3dc045d7-eb28-46c5-be05-cdcca879d533", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-956262977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "114596d74d9b40248f385df6e4644aaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50763b02-56", "ovs_interfaceid": "50763b02-561b-4c13-8a91-c7e639f09715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 745.368571] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:66:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50763b02-561b-4c13-8a91-c7e639f09715', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.385282] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Creating folder: Project (114596d74d9b40248f385df6e4644aaa). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.388673] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f167c39-bac7-41d6-8618-00a026e06e03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.404702] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Created folder: Project (114596d74d9b40248f385df6e4644aaa) in parent group-v647729. [ 745.404925] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Creating folder: Instances. Parent ref: group-v647839. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.405249] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3318ee67-8ac8-4650-af88-dd6cb8e1f388 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.420278] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Created folder: Instances in parent group-v647839. [ 745.420571] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 745.420874] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 745.421141] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1924506-546b-4855-869f-5efa19b521c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.447320] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.447320] env[69994]: value = "task-3241588" [ 745.447320] env[69994]: _type = "Task" [ 745.447320] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.457736] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241588, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.469613] env[69994]: DEBUG nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 745.531388] env[69994]: DEBUG oslo_vmware.api [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241584, 'name': PowerOnVM_Task, 'duration_secs': 0.483816} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.531388] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.531388] env[69994]: INFO nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Took 7.86 seconds to spawn the instance on the hypervisor. [ 745.531388] env[69994]: DEBUG nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.532071] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527833b2-57ce-4b10-b500-810e0e8679c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.563320] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241585, 'name': CreateSnapshot_Task, 'duration_secs': 0.482395} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.564980] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 745.564980] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9d55ee-de46-43c0-88c0-e17856c42c86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.611754] env[69994]: DEBUG nova.network.neutron [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updated VIF entry in instance network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 745.611754] env[69994]: DEBUG nova.network.neutron [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.876787] env[69994]: DEBUG nova.network.neutron [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Successfully created port: b5372a7b-7de1-4258-959a-ce83afa3070b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.921127] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "566522b0-7aa7-4552-9be7-035d742ba394" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.921618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "566522b0-7aa7-4552-9be7-035d742ba394" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.968104] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241588, 'name': CreateVM_Task, 'duration_secs': 0.391898} completed successfully. 
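The instance_info_cache entry logged above is a list of VIF dicts with nested network/subnet/IP structures. A minimal sketch of walking that structure, using the addresses from the record above (the variable names and the trimmed-down dict are illustrative only):

    # Trimmed-down copy of the cache entry shape logged above.
    network_info = [{
        "id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45",
        "address": "fa:16:3e:4b:69:6f",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.5",
                    "floating_ips": [{"address": "10.180.180.211"}],
                }],
            }],
        },
    }]

    # Walk VIF -> subnets -> fixed IPs -> floating IPs.
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], vif["address"], ip["address"], floats)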
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.968597] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 745.971259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.971552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.972028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 745.972374] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28ab22fd-e3fb-426f-9d47-437abdf7c00b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.984300] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 745.984300] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521c0816-df95-15f8-4fe1-9defb05b37b6" [ 745.984300] env[69994]: _type = "Task" [ 745.984300] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.995621] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521c0816-df95-15f8-4fe1-9defb05b37b6, 'name': SearchDatastore_Task, 'duration_secs': 0.011636} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.996070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.996401] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 745.996767] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.997039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.997342] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.997699] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3eeb25f0-fade-417e-86c3-43e49a7822af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.014117] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.014117] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 746.014117] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae5ef722-5047-4fca-a299-06bb632d6342 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.024254] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 746.024254] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fd255e-1184-ab46-0881-a8343bcd3f7d" [ 746.024254] env[69994]: _type = "Task" [ 746.024254] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.034662] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fd255e-1184-ab46-0881-a8343bcd3f7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.056389] env[69994]: INFO nova.compute.manager [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Took 55.07 seconds to build instance. [ 746.086418] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 746.086738] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b1578aea-b698-4100-bfbf-fd9dc6be7a10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.097525] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 746.097525] env[69994]: value = "task-3241589" [ 746.097525] env[69994]: _type = "Task" [ 746.097525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.107247] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241589, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.117143] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Releasing lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.117391] env[69994]: DEBUG nova.compute.manager [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Received event network-vif-plugged-50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.117578] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Acquiring lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.117772] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.117955] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.118224] env[69994]: DEBUG nova.compute.manager [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] No waiting events found dispatching network-vif-plugged-50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 746.118329] env[69994]: WARNING nova.compute.manager [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Received unexpected event network-vif-plugged-50763b02-561b-4c13-8a91-c7e639f09715 for instance with vm_state building and task_state spawning. [ 746.118517] env[69994]: DEBUG nova.compute.manager [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Received event network-changed-50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.118679] env[69994]: DEBUG nova.compute.manager [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Refreshing instance network info cache due to event network-changed-50763b02-561b-4c13-8a91-c7e639f09715. 
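The "Acquiring lock ... / Acquired lock ... waited N.NNNs / ... released ... held N.NNNs" records above reflect oslo.concurrency's named-lock helpers. A minimal sketch of that pattern follows, assuming oslo.concurrency is installed; the timing bookkeeping only approximates the waited/held figures and is not Nova's code.

    import time
    from oslo_concurrency import lockutils

    def refresh_cache(instance_uuid):
        lock_name = "refresh_cache-%s" % instance_uuid
        t0 = time.monotonic()
        # lockutils.lock() is the named-lock context manager; acquiring and
        # releasing it produces DEBUG records like the ones above.
        with lockutils.lock(lock_name):
            waited = time.monotonic() - t0          # roughly the "waited N.NNNs" figure
            acquired = time.monotonic()
            # ... rebuild the instance network info cache under the lock ...
            held = time.monotonic() - acquired      # roughly the "held N.NNNs" figure
        return waited, held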
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 746.118863] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Acquiring lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.119011] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Acquired lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.119263] env[69994]: DEBUG nova.network.neutron [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Refreshing network info cache for port 50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 746.158680] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd125623-55bb-4b96-a679-1b2f0eaba4ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.167378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce502d00-c2ed-4093-ad06-77a701c8ccc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.202080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66baf563-d757-4c51-9ae4-ef2e91908591 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.210651] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a95d51-518e-4835-9edc-ebe586b59abc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.226268] env[69994]: DEBUG nova.compute.provider_tree [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.487168] env[69994]: DEBUG nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 746.515525] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 746.515855] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.516120] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 746.516408] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.516633] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 746.516859] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 746.517173] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 746.517393] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 746.517694] env[69994]: DEBUG nova.virt.hardware [None 
req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 746.517943] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 746.518072] env[69994]: DEBUG nova.virt.hardware [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 746.519337] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba29621-68b3-49d7-b4d1-4e119efa366d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.537691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4deae03e-f1b6-42ed-b434-0bc83fa4c6bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.549257] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fd255e-1184-ab46-0881-a8343bcd3f7d, 'name': SearchDatastore_Task, 'duration_secs': 0.01749} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.551210] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc654db8-9a64-457d-b09f-71082e832686 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.567188] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2b0c0eb2-ff5f-4824-bb2c-1a6b07c2433d tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.562s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.572025] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 746.572025] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524101f6-2ce7-8f64-1a86-64ce95056e5c" [ 746.572025] env[69994]: _type = "Task" [ 746.572025] env[69994]: } to complete. 
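The "Build topologies for 1 vcpu(s) 1:1:1" and "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" records above amount to enumerating sockets/cores/threads factorisations of the vCPU count within the flavor/image limits (65536 each here). An illustrative sketch of that enumeration, not Nova's implementation:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Return all (sockets, cores, threads) triples whose product equals vcpus."""
        topos = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if (sockets * cores * threads == vcpus
                    and sockets <= max_sockets
                    and cores <= max_cores
                    and threads <= max_threads):
                topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches the record above
    print(possible_topologies(4))   # e.g. (1, 1, 4), (1, 2, 2), (4, 1, 1), ...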
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.582838] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524101f6-2ce7-8f64-1a86-64ce95056e5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.608216] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241589, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.730747] env[69994]: DEBUG nova.scheduler.client.report [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 746.784560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Acquiring lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.785089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.059998] env[69994]: DEBUG nova.network.neutron [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Updated VIF entry in instance network info cache for port 50763b02-561b-4c13-8a91-c7e639f09715. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 747.060500] env[69994]: DEBUG nova.network.neutron [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Updating instance_info_cache with network_info: [{"id": "50763b02-561b-4c13-8a91-c7e639f09715", "address": "fa:16:3e:e2:66:27", "network": {"id": "3dc045d7-eb28-46c5-be05-cdcca879d533", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-956262977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "114596d74d9b40248f385df6e4644aaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50763b02-56", "ovs_interfaceid": "50763b02-561b-4c13-8a91-c7e639f09715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.071091] env[69994]: DEBUG nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 747.085805] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524101f6-2ce7-8f64-1a86-64ce95056e5c, 'name': SearchDatastore_Task, 'duration_secs': 0.011929} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.086184] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.086340] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 289cbcc2-cd8f-4c4f-9169-a897f5527de1/289cbcc2-cd8f-4c4f-9169-a897f5527de1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 747.086748] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93f04f55-f38f-48a7-bd12-1f6010cfae1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.096779] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 747.096779] env[69994]: value = "task-3241590" [ 747.096779] env[69994]: _type = "Task" [ 747.096779] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.115912] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.120072] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241589, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.237713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.778s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.238385] env[69994]: DEBUG nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 747.241746] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.861s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.241854] env[69994]: DEBUG nova.objects.instance [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lazy-loading 'resources' on Instance uuid 0bfe4393-5b2a-487f-ba7a-858ed4c861a5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.288501] env[69994]: INFO nova.compute.manager [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Detaching volume 50fa937b-e572-4847-88c1-bfd627eacc5e [ 747.342512] env[69994]: INFO nova.virt.block_device [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Attempting to driver detach volume 50fa937b-e572-4847-88c1-bfd627eacc5e from mountpoint /dev/sdb [ 747.342770] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 747.342989] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647835', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'name': 'volume-50fa937b-e572-4847-88c1-bfd627eacc5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '53a8714c-50f7-4990-a3d9-86f8fc908d03', 'attached_at': '', 'detached_at': '', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'serial': '50fa937b-e572-4847-88c1-bfd627eacc5e'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 747.343960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01437d63-f6e2-42af-aeb1-4b25bb74e007 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.369145] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29258f5a-fd62-4688-85bf-0fde7a5d0fc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.378565] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f81e7d-47ec-41a3-8254-3be0b703a66a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.406800] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7c9918-4bd6-4c84-ae65-b10746473b94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.435351] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] The volume has not been displaced from its original location: [datastore1] volume-50fa937b-e572-4847-88c1-bfd627eacc5e/volume-50fa937b-e572-4847-88c1-bfd627eacc5e.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 747.442325] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Reconfiguring VM instance instance-0000000a to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 747.442799] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ed217ff-b54b-4003-83a0-99b973eccaf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.466307] env[69994]: DEBUG oslo_vmware.api [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Waiting for the task: (returnval){ [ 747.466307] env[69994]: value = "task-3241591" [ 747.466307] env[69994]: _type = "Task" [ 747.466307] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.476275] env[69994]: DEBUG oslo_vmware.api [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241591, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.565438] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c1f61d7-e08f-452f-a098-b08fc0ab923d req-4e24ef29-5e93-4408-80ff-56ea1ebc39d8 service nova] Releasing lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.612846] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241590, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.622025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.624471] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241589, 'name': CloneVM_Task} progress is 95%. 
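The volume detach above is carried out by issuing VirtualMachine.ReconfigVM_Task and then polling the returned task until vCenter reports completion, which is what produces the repeated "Task ... progress is N%" and "completed successfully" entries. A minimal sketch of that invoke-and-wait pattern, assuming `session` is an existing oslo_vmware.api.VMwareAPISession and `vm_ref`/`config_spec` are already built by the caller; this is illustrative, not the literal nova.virt.vmwareapi.volumeops code:

```python
def reconfigure_vm(session, vm_ref, config_spec):
    """Apply a VirtualMachineConfigSpec and block until the vCenter task finishes."""
    # invoke_api() serializes the SOAP call; ReconfigVM_Task returns a Task moref.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    # wait_for_task() polls the task object (the "_poll_task ... progress is N%"
    # lines in the log) and raises if vCenter reports an error state.
    return session.wait_for_task(task)
```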
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.741394] env[69994]: DEBUG nova.compute.manager [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.741394] env[69994]: DEBUG nova.compute.manager [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing instance network info cache due to event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 747.741394] env[69994]: DEBUG oslo_concurrency.lockutils [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] Acquiring lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.741394] env[69994]: DEBUG oslo_concurrency.lockutils [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] Acquired lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.741394] env[69994]: DEBUG nova.network.neutron [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.749047] env[69994]: DEBUG nova.compute.utils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 747.751465] env[69994]: DEBUG nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 747.751645] env[69994]: DEBUG nova.network.neutron [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.891734] env[69994]: DEBUG nova.policy [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb1874902bc24959b717674a99e530a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee188ea80c9847188df8b8482b7c6ec7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 747.976547] env[69994]: DEBUG oslo_vmware.api [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241591, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.111890] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241590, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659259} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.118630] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 289cbcc2-cd8f-4c4f-9169-a897f5527de1/289cbcc2-cd8f-4c4f-9169-a897f5527de1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 748.118977] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 748.122709] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b1a6c51-ebb3-4e21-8c0b-b503d04ac8f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.125016] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241589, 'name': CloneVM_Task, 'duration_secs': 1.813716} completed successfully. 
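The "Policy check for network:attach_external_network failed" entry above is oslo.policy denying the rule for a plain project member. A self-contained sketch of the same kind of check; the check string 'role:admin' and the credential dict are illustrative, not Nova's registered defaults:

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Register a default rule so the check works without a policy file on disk.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': 'ee188ea80c9847188df8b8482b7c6ec7'}
# do_raise=False returns a boolean instead of raising PolicyNotAuthorized.
allowed = enforcer.enforce('network:attach_external_network', {}, creds,
                           do_raise=False)
print(allowed)  # False for a non-admin member, as in the log
```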
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.125489] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Created linked-clone VM from snapshot [ 748.126601] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8713ca34-0572-4e5f-a7b5-3653937961fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.133494] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 748.133494] env[69994]: value = "task-3241592" [ 748.133494] env[69994]: _type = "Task" [ 748.133494] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.137286] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Uploading image e0bcc3a4-b58a-42ff-a3b7-a158a5974e40 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 748.155783] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241592, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.164912] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 748.164912] env[69994]: value = "vm-647843" [ 748.164912] env[69994]: _type = "VirtualMachine" [ 748.164912] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 748.165251] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bcdabf45-8765-4875-a09d-f1a1e64aeb52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.176466] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lease: (returnval){ [ 748.176466] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52134c3c-df10-a566-b261-1b655879f4a5" [ 748.176466] env[69994]: _type = "HttpNfcLease" [ 748.176466] env[69994]: } obtained for exporting VM: (result){ [ 748.176466] env[69994]: value = "vm-647843" [ 748.176466] env[69994]: _type = "VirtualMachine" [ 748.176466] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 748.176987] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the lease: (returnval){ [ 748.176987] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52134c3c-df10-a566-b261-1b655879f4a5" [ 748.176987] env[69994]: _type = "HttpNfcLease" [ 748.176987] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 748.185695] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 748.185695] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52134c3c-df10-a566-b261-1b655879f4a5" [ 748.185695] env[69994]: _type = "HttpNfcLease" [ 748.185695] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 748.255785] env[69994]: DEBUG nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 748.389410] env[69994]: DEBUG nova.network.neutron [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Successfully updated port: b5372a7b-7de1-4258-959a-ce83afa3070b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.446206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ae7b5f-f8c5-457f-b9e8-55abefb78c2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.457086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d680437b-cff9-4840-a3b1-7bc9ba5aed32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.495256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7308ce18-c9fc-4557-b408-262ae156135c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.509107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce34e66-3d76-485a-9d87-a04cb14ce631 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.513229] env[69994]: DEBUG oslo_vmware.api [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241591, 'name': ReconfigVM_Task, 'duration_secs': 0.534236} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.513513] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Reconfigured VM instance instance-0000000a to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 748.518701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9f694bd-855e-450f-b55f-8daf9f1729ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.536945] env[69994]: DEBUG nova.compute.provider_tree [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.544803] env[69994]: DEBUG oslo_vmware.api [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Waiting for the task: (returnval){ [ 748.544803] env[69994]: value = "task-3241594" [ 748.544803] env[69994]: _type = "Task" [ 748.544803] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.558279] env[69994]: DEBUG oslo_vmware.api [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241594, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.652552] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241592, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.201546} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.657026] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 748.657026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621b748c-117f-4255-bf74-f7d6c3a82ee2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.685858] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 289cbcc2-cd8f-4c4f-9169-a897f5527de1/289cbcc2-cd8f-4c4f-9169-a897f5527de1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 748.686769] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9e1ec20-7c50-4f9a-9c33-759733679c3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.710411] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 748.710411] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52134c3c-df10-a566-b261-1b655879f4a5" [ 748.710411] env[69994]: _type = "HttpNfcLease" [ 748.710411] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 748.712358] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 748.712358] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52134c3c-df10-a566-b261-1b655879f4a5" [ 748.712358] env[69994]: _type = "HttpNfcLease" [ 748.712358] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 748.712847] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 748.712847] env[69994]: value = "task-3241595" [ 748.712847] env[69994]: _type = "Task" [ 748.712847] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.713950] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b271b13e-0575-4cac-a697-62815826b781 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.728848] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241595, 'name': ReconfigVM_Task} progress is 10%. 
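The "Extending root virtual disk to 1048576" / ExtendVirtualDisk_Task pair above grows the copied image-cache VMDK to the flavor's root_gb (1 GiB = 1048576 KB here). A hedged sketch of that call, assuming `session` is a VMwareAPISession and `dc_ref`/the datastore path already exist; it mirrors, but is not, nova.virt.vmwareapi.vm_util:

```python
def extend_virtual_disk(session, vmdk_path, dc_ref, new_size_gb):
    """Grow a VMDK in place via the VirtualDiskManager."""
    disk_manager = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_manager,
        name=vmdk_path,                            # e.g. "[datastore1] <uuid>/<uuid>.vmdk"
        datacenter=dc_ref,
        newCapacityKb=new_size_gb * 1024 * 1024,   # the log reports this KB value
        eagerZero=False)
    session.wait_for_task(task)
```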
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.732114] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528b36bd-ab55-1e1d-a1b1-aed01571b1e7/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 748.732329] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528b36bd-ab55-1e1d-a1b1-aed01571b1e7/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 748.810703] env[69994]: DEBUG nova.network.neutron [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Successfully created port: 42c8060b-e0e2-4cd5-acdb-812565e57ee8 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.845806] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2cb75ab8-76bb-464c-9d67-245a4cee790d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.898498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "refresh_cache-3c2c8a40-919d-4280-b9be-f8d95b1a263e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.898498] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquired lock "refresh_cache-3c2c8a40-919d-4280-b9be-f8d95b1a263e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.898498] env[69994]: DEBUG nova.network.neutron [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.915419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "7e7953f7-ed5d-4515-9181-93d343ad772d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.915670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d" acquired by 
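The linked-clone upload above goes through an HttpNfcLease: ExportVm returns a lease that starts out "initializing", becomes "ready", and then exposes a per-disk VMDK URL that the read handle opens. A rough sketch of that handshake, assuming an oslo_vmware.api.VMwareAPISession and a `vm_ref` for the clone; it is illustrative rather than the exact path through nova.virt.vmwareapi.images and oslo_vmware.rw_handles:

```python
from oslo_vmware import vim_util


def get_export_vmdk_url(session, vm_ref):
    """Open an HttpNfcLease for a VM and return the first disk's VMDK URL."""
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    # Blocks until the lease leaves the "initializing" state seen in the log.
    session.wait_for_lease_ready(lease)
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    # HttpNfcLeaseInfo.deviceUrl lists one entry per exported disk.
    return lease_info.deviceUrl[0].url
```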
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.915878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "7e7953f7-ed5d-4515-9181-93d343ad772d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.916095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.916269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.919864] env[69994]: INFO nova.compute.manager [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Terminating instance [ 748.931958] env[69994]: DEBUG nova.network.neutron [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updated VIF entry in instance network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 748.932312] env[69994]: DEBUG nova.network.neutron [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.043883] env[69994]: DEBUG nova.scheduler.client.report [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.057892] env[69994]: DEBUG oslo_vmware.api [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Task: {'id': task-3241594, 'name': ReconfigVM_Task, 'duration_secs': 0.243754} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.058163] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647835', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'name': 'volume-50fa937b-e572-4847-88c1-bfd627eacc5e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '53a8714c-50f7-4990-a3d9-86f8fc908d03', 'attached_at': '', 'detached_at': '', 'volume_id': '50fa937b-e572-4847-88c1-bfd627eacc5e', 'serial': '50fa937b-e572-4847-88c1-bfd627eacc5e'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 749.228670] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241595, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.313988] env[69994]: DEBUG nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 749.347268] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 749.347498] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.347666] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 749.347900] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 
tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.349656] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 749.351288] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 749.351534] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 749.351700] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 749.351874] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 749.352187] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 749.352255] env[69994]: DEBUG nova.virt.hardware [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 749.353147] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d56c49-8c4f-49c0-bf02-ff0a70e888df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.370035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cca3466-b59b-48c8-8fb7-1365472b4028 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.430020] env[69994]: DEBUG nova.compute.manager [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Start destroying the instance on the hypervisor. 
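The topology lines above show that with no flavor or image limits (the 0:0:0 entries), the 1-vCPU m1.nano flavor admits exactly one candidate, VirtCPUTopology(cores=1,sockets=1,threads=1). Illustrative only, not nova.virt.hardware itself: the unconstrained candidates are just the factorizations of the vCPU count into sockets x cores x threads.

```python
import itertools


def possible_topologies(vcpus):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in itertools.product(range(1, vcpus + 1),
                                                     repeat=3):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)


print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log
```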
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 749.430020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.430020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e27bc8-122d-48d6-a4f4-6d53967db673 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.435374] env[69994]: DEBUG oslo_concurrency.lockutils [req-95f3b485-d25c-47ab-a765-4e581c690fe3 req-25196b83-5d8d-4bfb-88cb-647d8be7fc4f service nova] Releasing lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.438496] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.438946] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-468eb643-77ba-4888-92b3-0ed91894d605 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.448192] env[69994]: DEBUG oslo_vmware.api [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 749.448192] env[69994]: value = "task-3241596" [ 749.448192] env[69994]: _type = "Task" [ 749.448192] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.467029] env[69994]: DEBUG oslo_vmware.api [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241596, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.480020] env[69994]: DEBUG nova.network.neutron [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.552055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.310s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.555995] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.591s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.559123] env[69994]: INFO nova.compute.claims [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.589071] env[69994]: INFO nova.scheduler.client.report [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Deleted allocations for instance 0bfe4393-5b2a-487f-ba7a-858ed4c861a5 [ 749.691985] env[69994]: DEBUG nova.objects.instance [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lazy-loading 'flavor' on Instance uuid 53a8714c-50f7-4990-a3d9-86f8fc908d03 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 749.729580] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241595, 'name': ReconfigVM_Task, 'duration_secs': 0.666754} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.730359] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 289cbcc2-cd8f-4c4f-9169-a897f5527de1/289cbcc2-cd8f-4c4f-9169-a897f5527de1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 749.730525] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71772933-5530-4993-a586-bafb3ec1b5cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.737392] env[69994]: DEBUG nova.network.neutron [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Updating instance_info_cache with network_info: [{"id": "b5372a7b-7de1-4258-959a-ce83afa3070b", "address": "fa:16:3e:15:56:14", "network": {"id": "b2531c58-bc24-425c-abbb-1dc5508ce7da", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1416309400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7668e2fbee0a4948a0cbdbd964e764e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5372a7b-7d", "ovs_interfaceid": "b5372a7b-7de1-4258-959a-ce83afa3070b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.742071] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 749.742071] env[69994]: value = "task-3241597" [ 749.742071] env[69994]: _type = "Task" [ 749.742071] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.761244] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241597, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.962830] env[69994]: DEBUG oslo_vmware.api [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241596, 'name': PowerOffVM_Task, 'duration_secs': 0.221275} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.964294] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.964520] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.964799] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b639c60-4862-4f85-beaa-2d89e0732a0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.005981] env[69994]: DEBUG nova.compute.manager [req-b6a89af4-2e24-4927-8b66-f27ad7884dce req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Received event network-changed-46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.006399] env[69994]: DEBUG nova.compute.manager [req-b6a89af4-2e24-4927-8b66-f27ad7884dce req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Refreshing instance network info cache due to event network-changed-46e255ba-a2d4-4bd1-942b-f18624bd0198. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 750.006399] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6a89af4-2e24-4927-8b66-f27ad7884dce req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] Acquiring lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.007342] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6a89af4-2e24-4927-8b66-f27ad7884dce req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] Acquired lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.007342] env[69994]: DEBUG nova.network.neutron [req-b6a89af4-2e24-4927-8b66-f27ad7884dce req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Refreshing network info cache for port 46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 750.051024] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.051024] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.051024] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Deleting the datastore file [datastore1] 7e7953f7-ed5d-4515-9181-93d343ad772d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.051024] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db8378b6-d0b5-4506-9f4d-56f20b0f2073 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.065062] env[69994]: DEBUG oslo_vmware.api [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 750.065062] env[69994]: value = "task-3241599" [ 750.065062] env[69994]: _type = "Task" [ 750.065062] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.078713] env[69994]: DEBUG oslo_vmware.api [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241599, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.102565] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5fe9601-7ff8-47f3-840a-4fe60da93a99 tempest-TenantUsagesTestJSON-1659715334 tempest-TenantUsagesTestJSON-1659715334-project-member] Lock "0bfe4393-5b2a-487f-ba7a-858ed4c861a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.560s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.241566] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Releasing lock "refresh_cache-3c2c8a40-919d-4280-b9be-f8d95b1a263e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.244030] env[69994]: DEBUG nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Instance network_info: |[{"id": "b5372a7b-7de1-4258-959a-ce83afa3070b", "address": "fa:16:3e:15:56:14", "network": {"id": "b2531c58-bc24-425c-abbb-1dc5508ce7da", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1416309400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7668e2fbee0a4948a0cbdbd964e764e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5372a7b-7d", "ovs_interfaceid": "b5372a7b-7de1-4258-959a-ce83afa3070b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 750.244914] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:56:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3753f451-fa23-4988-9361-074fb0bd3fd4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5372a7b-7de1-4258-959a-ce83afa3070b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.252634] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Creating folder: Project (7668e2fbee0a4948a0cbdbd964e764e4). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.257149] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-349b045a-c39d-4114-aaed-d844cc82ca12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.264758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "a4544bc9-6935-4825-9b45-2054d2ced330" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.265074] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "a4544bc9-6935-4825-9b45-2054d2ced330" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.276727] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241597, 'name': Rename_Task, 'duration_secs': 0.193712} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.279155] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.279155] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da84bd2a-ba3b-4f9a-bf79-aaa02f9a3cb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.286359] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 750.286359] env[69994]: value = "task-3241601" [ 750.286359] env[69994]: _type = "Task" [ 750.286359] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.291946] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Created folder: Project (7668e2fbee0a4948a0cbdbd964e764e4) in parent group-v647729. [ 750.291946] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Creating folder: Instances. Parent ref: group-v647844. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.292587] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-720a5460-fa49-4e2f-ae2a-4dabfff0a04a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.300676] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241601, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.305386] env[69994]: DEBUG nova.compute.manager [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.305386] env[69994]: DEBUG nova.compute.manager [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing instance network info cache due to event network-changed-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 750.306814] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Acquiring lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.306814] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Acquired lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.306814] env[69994]: DEBUG nova.network.neutron [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Refreshing network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 750.309819] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Created folder: Instances in parent group-v647844. [ 750.310766] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.311548] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.311986] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ee521d8-e5a2-4580-843b-b12750b1ef03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.343429] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.343429] env[69994]: value = "task-3241603" [ 750.343429] env[69994]: _type = "Task" [ 750.343429] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.362162] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241603, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.583026] env[69994]: DEBUG oslo_vmware.api [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276761} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.583128] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.583409] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.583662] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.583887] env[69994]: INFO nova.compute.manager [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 750.587046] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.587046] env[69994]: DEBUG nova.compute.manager [-] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 750.587046] env[69994]: DEBUG nova.network.neutron [-] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.702542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f37e5438-7d41-4bdb-ac9b-6aade86e6389 tempest-VolumesAssistedSnapshotsTest-198719812 tempest-VolumesAssistedSnapshotsTest-198719812-project-admin] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.917s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.803149] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241601, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.857507] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241603, 'name': CreateVM_Task, 'duration_secs': 0.447781} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.858046] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.858906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.859068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.859531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 750.860318] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0afe191-b050-45a4-bac5-ed005550d450 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.866700] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] 
Waiting for the task: (returnval){ [ 750.866700] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e2eec6-a84b-2b75-fe21-ee2fee446e94" [ 750.866700] env[69994]: _type = "Task" [ 750.866700] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.885924] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e2eec6-a84b-2b75-fe21-ee2fee446e94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.930069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.930069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.930069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.930299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.930299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.936086] env[69994]: INFO nova.compute.manager [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Terminating instance [ 751.066696] env[69994]: DEBUG nova.network.neutron [req-b6a89af4-2e24-4927-8b66-f27ad7884dce 
req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Updated VIF entry in instance network info cache for port 46e255ba-a2d4-4bd1-942b-f18624bd0198. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.067100] env[69994]: DEBUG nova.network.neutron [req-b6a89af4-2e24-4927-8b66-f27ad7884dce req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Updating instance_info_cache with network_info: [{"id": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "address": "fa:16:3e:e0:78:98", "network": {"id": "6b605f54-2fa6-407e-b3f4-2ce9853b449e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1428465928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91697b639b15438297eec6880a72f444", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e255ba-a2", "ovs_interfaceid": "46e255ba-a2d4-4bd1-942b-f18624bd0198", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.201882] env[69994]: DEBUG nova.network.neutron [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updated VIF entry in instance network info cache for port 18ed1bc7-f241-4d6e-83f9-4df1b8b70c45. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.202246] env[69994]: DEBUG nova.network.neutron [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [{"id": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "address": "fa:16:3e:4b:69:6f", "network": {"id": "74734ae5-86fb-4652-8203-198f5d02a70f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-585354620-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "dfa35e60b54941dfbfb8671758ccd039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ed1bc7-f2", "ovs_interfaceid": "18ed1bc7-f241-4d6e-83f9-4df1b8b70c45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.295406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbdbf15-8ee7-4e9d-bd76-4ea34bdd6884 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.308396] env[69994]: DEBUG oslo_vmware.api [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241601, 'name': PowerOnVM_Task, 'duration_secs': 0.778471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.310459] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 751.310658] env[69994]: INFO nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Took 9.44 seconds to spawn the instance on the hypervisor. 
[ 751.310849] env[69994]: DEBUG nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 751.311685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f361e2-a707-4936-97d2-3a01cf1f20f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.315201] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1351aced-6f2f-4614-8bce-46777bbeaaca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.364598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a8eaa8-d830-4670-a656-43d9d3e1cedf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.380990] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30241cb9-5438-4a3f-bb4b-c6e49999d607 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.393807] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e2eec6-a84b-2b75-fe21-ee2fee446e94, 'name': SearchDatastore_Task, 'duration_secs': 0.068652} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.394195] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.394459] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.394790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.394913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.395147] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.395449] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83e1db0c-b2ba-4ce1-a1d5-9e62ac4f4013 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.405822] env[69994]: DEBUG nova.compute.provider_tree [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.421810] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.421810] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.422552] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e350ae1-5575-4d0e-ba6e-8f83be64ab66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.429496] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 751.429496] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52da8dfa-e1f2-aa1e-f32f-d4d5d3c8e917" [ 751.429496] env[69994]: _type = "Task" [ 751.429496] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.438258] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52da8dfa-e1f2-aa1e-f32f-d4d5d3c8e917, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.442991] env[69994]: DEBUG nova.compute.manager [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 751.443211] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.444079] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e965b9-6d88-44cc-82f5-4986e5ff73ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.452824] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 751.453095] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52e8b899-71da-43ce-98aa-52997ffbed08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.461074] env[69994]: DEBUG oslo_vmware.api [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 751.461074] env[69994]: value = "task-3241605" [ 751.461074] env[69994]: _type = "Task" [ 751.461074] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.470131] env[69994]: DEBUG oslo_vmware.api [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241605, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.571167] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6a89af4-2e24-4927-8b66-f27ad7884dce req-10c5aaaf-c3a8-4427-b9c3-819bd992fda0 service nova] Releasing lock "refresh_cache-744fe018-d12c-44c2-98f1-c11fbfffc98e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.635116] env[69994]: DEBUG nova.network.neutron [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Successfully updated port: 42c8060b-e0e2-4cd5-acdb-812565e57ee8 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 751.705380] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Releasing lock "refresh_cache-ce6f9a88-faa8-442e-8b48-64979dd2d03e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.706295] env[69994]: DEBUG nova.compute.manager [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Received event network-vif-plugged-b5372a7b-7de1-4258-959a-ce83afa3070b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.706295] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Acquiring lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.706443] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.706705] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.706956] env[69994]: DEBUG nova.compute.manager [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] No waiting events found dispatching network-vif-plugged-b5372a7b-7de1-4258-959a-ce83afa3070b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 751.707205] env[69994]: WARNING nova.compute.manager 
[req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Received unexpected event network-vif-plugged-b5372a7b-7de1-4258-959a-ce83afa3070b for instance with vm_state building and task_state spawning. [ 751.707493] env[69994]: DEBUG nova.compute.manager [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Received event network-changed-b5372a7b-7de1-4258-959a-ce83afa3070b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.707797] env[69994]: DEBUG nova.compute.manager [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Refreshing instance network info cache due to event network-changed-b5372a7b-7de1-4258-959a-ce83afa3070b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 751.709102] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Acquiring lock "refresh_cache-3c2c8a40-919d-4280-b9be-f8d95b1a263e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.709318] env[69994]: DEBUG oslo_concurrency.lockutils [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Acquired lock "refresh_cache-3c2c8a40-919d-4280-b9be-f8d95b1a263e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.709609] env[69994]: DEBUG nova.network.neutron [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Refreshing network info cache for port b5372a7b-7de1-4258-959a-ce83afa3070b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.877219] env[69994]: INFO nova.compute.manager [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Took 57.37 seconds to build instance. [ 751.909960] env[69994]: DEBUG nova.scheduler.client.report [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.944032] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52da8dfa-e1f2-aa1e-f32f-d4d5d3c8e917, 'name': SearchDatastore_Task, 'duration_secs': 0.020851} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.944566] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0afe3770-d62d-4901-8d21-f38fee584e9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.953815] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 751.953815] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529b6f71-6263-066a-aa0c-dc8087c06845" [ 751.953815] env[69994]: _type = "Task" [ 751.953815] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.963929] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529b6f71-6263-066a-aa0c-dc8087c06845, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.974360] env[69994]: DEBUG oslo_vmware.api [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241605, 'name': PowerOffVM_Task, 'duration_secs': 0.416221} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.974783] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.974866] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.975121] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfe4c0b5-d9d2-497d-ad62-8b72a35a4625 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.058496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 752.058800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
752.059049] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Deleting the datastore file [datastore2] ce6f9a88-faa8-442e-8b48-64979dd2d03e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.059464] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01e9a3db-ca48-42fd-ba16-44e579dd202f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.070514] env[69994]: DEBUG oslo_vmware.api [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for the task: (returnval){ [ 752.070514] env[69994]: value = "task-3241607" [ 752.070514] env[69994]: _type = "Task" [ 752.070514] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.080935] env[69994]: DEBUG oslo_vmware.api [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241607, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.106258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.106258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.139885] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-5f672fd4-b96f-4506-aa1e-96692a00cb43" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.139885] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-5f672fd4-b96f-4506-aa1e-96692a00cb43" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.139885] env[69994]: DEBUG nova.network.neutron [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
752.380837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693c7a30-9f17-4086-b5aa-5d9652f98fd0 tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.933s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.415673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.859s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.415874] env[69994]: DEBUG nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 752.422044] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 35.730s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.467183] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529b6f71-6263-066a-aa0c-dc8087c06845, 'name': SearchDatastore_Task, 'duration_secs': 0.028989} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.467600] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.468085] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 3c2c8a40-919d-4280-b9be-f8d95b1a263e/3c2c8a40-919d-4280-b9be-f8d95b1a263e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.468407] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7712724-dec2-49e2-a565-038c983bf1e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.479988] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 752.479988] env[69994]: value = "task-3241608" [ 752.479988] env[69994]: _type = "Task" [ 752.479988] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.497999] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241608, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.582094] env[69994]: DEBUG oslo_vmware.api [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Task: {'id': task-3241607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396107} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.585036] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.585036] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 752.585036] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 752.585036] env[69994]: INFO nova.compute.manager [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 752.585036] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.585402] env[69994]: DEBUG nova.compute.manager [-] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 752.585402] env[69994]: DEBUG nova.network.neutron [-] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.590188] env[69994]: DEBUG nova.network.neutron [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Updated VIF entry in instance network info cache for port b5372a7b-7de1-4258-959a-ce83afa3070b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.591161] env[69994]: DEBUG nova.network.neutron [req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Updating instance_info_cache with network_info: [{"id": "b5372a7b-7de1-4258-959a-ce83afa3070b", "address": "fa:16:3e:15:56:14", "network": {"id": "b2531c58-bc24-425c-abbb-1dc5508ce7da", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1416309400-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7668e2fbee0a4948a0cbdbd964e764e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3753f451-fa23-4988-9361-074fb0bd3fd4", "external-id": "nsx-vlan-transportzone-440", "segmentation_id": 440, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5372a7b-7d", "ovs_interfaceid": "b5372a7b-7de1-4258-959a-ce83afa3070b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.614429] env[69994]: DEBUG nova.compute.utils [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.648116] env[69994]: DEBUG nova.network.neutron [-] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.723725] env[69994]: DEBUG nova.network.neutron [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.886242] env[69994]: DEBUG nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 752.925583] env[69994]: DEBUG nova.compute.utils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.941113] env[69994]: DEBUG nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 752.944427] env[69994]: DEBUG nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.944659] env[69994]: DEBUG nova.network.neutron [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 753.005874] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241608, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.008266] env[69994]: DEBUG nova.policy [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '332c174655374c799bb181a29701473e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb027b5b61c43cdbac3c89eb1e0f2a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 753.071276] env[69994]: DEBUG nova.network.neutron [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Updating instance_info_cache with network_info: [{"id": "42c8060b-e0e2-4cd5-acdb-812565e57ee8", "address": "fa:16:3e:2a:8c:8a", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42c8060b-e0", "ovs_interfaceid": "42c8060b-e0e2-4cd5-acdb-812565e57ee8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.095034] env[69994]: DEBUG oslo_concurrency.lockutils 
[req-680f58b5-2576-46d5-84b3-a6bf73865bfd req-023d7ad3-7da5-47f4-a7bf-c743297ce027 service nova] Releasing lock "refresh_cache-3c2c8a40-919d-4280-b9be-f8d95b1a263e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.118504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.150867] env[69994]: INFO nova.compute.manager [-] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Took 2.57 seconds to deallocate network for instance. [ 753.176213] env[69994]: DEBUG nova.compute.manager [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Received event network-vif-plugged-42c8060b-e0e2-4cd5-acdb-812565e57ee8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 753.176449] env[69994]: DEBUG oslo_concurrency.lockutils [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] Acquiring lock "5f672fd4-b96f-4506-aa1e-96692a00cb43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.176679] env[69994]: DEBUG oslo_concurrency.lockutils [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.176870] env[69994]: DEBUG oslo_concurrency.lockutils [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.178171] env[69994]: DEBUG nova.compute.manager [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] No waiting events found dispatching network-vif-plugged-42c8060b-e0e2-4cd5-acdb-812565e57ee8 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 753.178171] env[69994]: WARNING nova.compute.manager [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Received unexpected event network-vif-plugged-42c8060b-e0e2-4cd5-acdb-812565e57ee8 for instance with vm_state building and task_state spawning. 
[ 753.178171] env[69994]: DEBUG nova.compute.manager [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Received event network-changed-42c8060b-e0e2-4cd5-acdb-812565e57ee8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 753.178171] env[69994]: DEBUG nova.compute.manager [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Refreshing instance network info cache due to event network-changed-42c8060b-e0e2-4cd5-acdb-812565e57ee8. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 753.178377] env[69994]: DEBUG oslo_concurrency.lockutils [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] Acquiring lock "refresh_cache-5f672fd4-b96f-4506-aa1e-96692a00cb43" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.409216] env[69994]: DEBUG nova.network.neutron [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Successfully created port: 5d9621bb-8ee0-4885-a42e-d68e4c759211 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.419839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.486940] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.486940] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 15d17772-ac57-49a3-b261-bf49b902f658 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.486940] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 53a8714c-50f7-4990-a3d9-86f8fc908d03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.486940] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance b003b7c2-e754-440e-8a65-13c5e9c68cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.487178] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 7e7953f7-ed5d-4515-9181-93d343ad772d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.487178] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance c47c26c8-3f7f-436b-95aa-0bd08d41e62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.487178] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance aeb7928a-8307-49e7-b019-a4c674e6369a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.487178] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 1d548f54-4ffa-4299-9212-717350558ad4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.487323] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 45a8dced-6c49-441c-92e2-ee323ed8753c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.487323] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance dbad6bed-64ba-4dfd-abad-c0b2c775ba2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.487323] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance cef66a67-e3ac-40dc-a8a4-0375bd64c484 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.488707] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.489216] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e4013007-fd79-4d70-a9d1-70a4c621c0ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.489216] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 153f0ead-6e2f-4077-b86a-00d3a1114fed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.489216] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 493c2d85-eef5-44ae-acfc-2744685135ca is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.489398] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e8b4640f-302d-43cd-a654-c42f9cb34766 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.489398] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ce6f9a88-faa8-442e-8b48-64979dd2d03e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.489522] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 87473dd1-458d-4ef4-a1bd-7e653e509ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.489664] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e8caf244-413b-49bb-bdff-79aca0ccbc2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.489791] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 744fe018-d12c-44c2-98f1-c11fbfffc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.490522] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 289cbcc2-cd8f-4c4f-9169-a897f5527de1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.490522] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 3c2c8a40-919d-4280-b9be-f8d95b1a263e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.490522] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 5f672fd4-b96f-4506-aa1e-96692a00cb43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.490522] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 558ee84a-731b-4cb1-967d-cf84c8d39718 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.506774] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241608, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616232} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.507858] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 3c2c8a40-919d-4280-b9be-f8d95b1a263e/3c2c8a40-919d-4280-b9be-f8d95b1a263e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.508084] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.508414] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88743275-2efc-4e64-ae42-4b1aab62253f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.517934] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 753.517934] env[69994]: value = "task-3241609" [ 753.517934] env[69994]: _type = "Task" [ 753.517934] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.535040] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241609, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.574181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-5f672fd4-b96f-4506-aa1e-96692a00cb43" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.575081] env[69994]: DEBUG nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Instance network_info: |[{"id": "42c8060b-e0e2-4cd5-acdb-812565e57ee8", "address": "fa:16:3e:2a:8c:8a", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42c8060b-e0", "ovs_interfaceid": "42c8060b-e0e2-4cd5-acdb-812565e57ee8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 753.575081] env[69994]: DEBUG oslo_concurrency.lockutils [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] Acquired lock "refresh_cache-5f672fd4-b96f-4506-aa1e-96692a00cb43" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.575230] env[69994]: DEBUG nova.network.neutron [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Refreshing network info cache for port 42c8060b-e0e2-4cd5-acdb-812565e57ee8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.580019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:8c:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42c8060b-e0e2-4cd5-acdb-812565e57ee8', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 753.585819] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] 
Creating folder: Project (ee188ea80c9847188df8b8482b7c6ec7). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 753.587945] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89499090-6648-49d2-b0d1-38ca87dd29b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.591085] env[69994]: DEBUG nova.network.neutron [-] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.605551] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created folder: Project (ee188ea80c9847188df8b8482b7c6ec7) in parent group-v647729. [ 753.605551] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating folder: Instances. Parent ref: group-v647847. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 753.605643] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66485693-4625-464a-83cd-7e8b0fb5a997 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.621951] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created folder: Instances in parent group-v647847. [ 753.622734] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 753.623128] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 753.623525] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acd0ebc4-dc5f-43b0-9ecd-ed1e65190eb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.653863] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 753.653863] env[69994]: value = "task-3241612" [ 753.653863] env[69994]: _type = "Task" [ 753.653863] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.659192] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.670462] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241612, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.957911] env[69994]: DEBUG nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 754.000482] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 754.000903] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.001701] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 754.002124] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.002461] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 754.003188] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 
tempest-ServerRescueTestJSON-2013304110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 754.004699] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 754.005112] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 754.005676] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 754.006033] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 754.006387] env[69994]: DEBUG nova.virt.hardware [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 754.008516] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 4dbf53e0-caa1-41f4-8376-dfba8d8567cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.014057] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f05bbc-83cd-49de-90fe-e349aa805982 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.030364] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8d5a86-a864-48f7-93d8-bc7877a1d8dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.039811] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.169233} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.040837] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.042282] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b7d08e-e4aa-4cde-953c-86ec03f0d186 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.081507] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 3c2c8a40-919d-4280-b9be-f8d95b1a263e/3c2c8a40-919d-4280-b9be-f8d95b1a263e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.082259] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e944c06f-b4db-498b-b0a0-f015c2f59b56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.102270] env[69994]: INFO nova.compute.manager [-] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Took 1.52 seconds to deallocate network for instance. [ 754.111241] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 754.111241] env[69994]: value = "task-3241613" [ 754.111241] env[69994]: _type = "Task" [ 754.111241] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.122688] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241613, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.163731] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241612, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.250426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.250687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.250950] env[69994]: INFO nova.compute.manager [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Attaching volume 3289f92c-da6a-4c4d-b76c-70d8912e0349 to /dev/sdb [ 754.293794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb7f862-9f93-4566-80b1-2eff8b16997d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.309989] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83267dbc-8038-4746-bfec-5e3cda23c988 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.326532] env[69994]: DEBUG nova.virt.block_device [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updating existing volume attachment record: b47432f7-4661-463d-b923-06ba456b17a5 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 754.519660] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 214b3508-6fb9-455e-be6b-bd9f6902b7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.611072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.622069] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241613, 'name': ReconfigVM_Task, 'duration_secs': 0.393562} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.622069] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 3c2c8a40-919d-4280-b9be-f8d95b1a263e/3c2c8a40-919d-4280-b9be-f8d95b1a263e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.622850] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1467077b-ea8c-4c64-b286-7d2a3eae3d0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.630055] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 754.630055] env[69994]: value = "task-3241615" [ 754.630055] env[69994]: _type = "Task" [ 754.630055] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.639867] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241615, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.663273] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241612, 'name': CreateVM_Task, 'duration_secs': 0.523976} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.663505] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 754.664354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.664650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.664966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 754.665275] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5339b42-b3b3-4bab-add2-a5f109518cd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.671521] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 754.671521] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52079cfb-3c28-a67d-84b1-6dea168b83bc" [ 754.671521] env[69994]: _type = "Task" [ 754.671521] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.683570] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52079cfb-3c28-a67d-84b1-6dea168b83bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.793275] env[69994]: DEBUG nova.network.neutron [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Updated VIF entry in instance network info cache for port 42c8060b-e0e2-4cd5-acdb-812565e57ee8. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 754.793275] env[69994]: DEBUG nova.network.neutron [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Updating instance_info_cache with network_info: [{"id": "42c8060b-e0e2-4cd5-acdb-812565e57ee8", "address": "fa:16:3e:2a:8c:8a", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42c8060b-e0", "ovs_interfaceid": "42c8060b-e0e2-4cd5-acdb-812565e57ee8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.025900] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 0b284e71-7af2-4782-b950-4f7eac5221a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.083397] env[69994]: DEBUG nova.network.neutron [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Successfully updated port: 5d9621bb-8ee0-4885-a42e-d68e4c759211 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 755.141809] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241615, 'name': Rename_Task, 'duration_secs': 0.19156} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.142142] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.142410] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90b0c2cc-2ba8-451e-92ce-4bcce596fd11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.151256] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 755.151256] env[69994]: value = "task-3241618" [ 755.151256] env[69994]: _type = "Task" [ 755.151256] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.162041] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.183151] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52079cfb-3c28-a67d-84b1-6dea168b83bc, 'name': SearchDatastore_Task, 'duration_secs': 0.018559} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.183499] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.183740] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 755.183979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.184143] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.184354] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.184593] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cecae6c-10bc-4cf4-948c-04720024aeab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.197876] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.197876] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 755.198959] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2262a46-0c1c-47c8-b857-7dc668f12ffe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.205141] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 755.205141] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ed2baf-6085-548b-edb0-239b1bfc9607" [ 755.205141] env[69994]: _type = "Task" [ 755.205141] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.215257] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ed2baf-6085-548b-edb0-239b1bfc9607, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.298641] env[69994]: DEBUG oslo_concurrency.lockutils [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] Releasing lock "refresh_cache-5f672fd4-b96f-4506-aa1e-96692a00cb43" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.300167] env[69994]: DEBUG nova.compute.manager [req-80e95eea-1d24-4016-91ef-06c3da0d3b47 req-188b2ba5-6bc8-44ce-9ea3-1cda4a7ccbb5 service nova] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Received event network-vif-deleted-bcae7796-2595-4bff-96c1-d85a7cba05d8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.387425] env[69994]: DEBUG nova.compute.manager [req-1da91dba-6afe-40fe-951b-b26ee4235c66 req-ef70cc3a-9975-4b20-ba16-7bf2a3619e26 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Received event network-vif-plugged-5d9621bb-8ee0-4885-a42e-d68e4c759211 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.387684] env[69994]: DEBUG oslo_concurrency.lockutils [req-1da91dba-6afe-40fe-951b-b26ee4235c66 req-ef70cc3a-9975-4b20-ba16-7bf2a3619e26 service nova] Acquiring lock "558ee84a-731b-4cb1-967d-cf84c8d39718-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.387897] env[69994]: DEBUG oslo_concurrency.lockutils [req-1da91dba-6afe-40fe-951b-b26ee4235c66 req-ef70cc3a-9975-4b20-ba16-7bf2a3619e26 service nova] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.388107] env[69994]: DEBUG oslo_concurrency.lockutils [req-1da91dba-6afe-40fe-951b-b26ee4235c66 req-ef70cc3a-9975-4b20-ba16-7bf2a3619e26 service nova] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.388287] env[69994]: DEBUG nova.compute.manager [req-1da91dba-6afe-40fe-951b-b26ee4235c66 req-ef70cc3a-9975-4b20-ba16-7bf2a3619e26 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] No waiting events found dispatching network-vif-plugged-5d9621bb-8ee0-4885-a42e-d68e4c759211 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 755.388474] env[69994]: WARNING nova.compute.manager [req-1da91dba-6afe-40fe-951b-b26ee4235c66 req-ef70cc3a-9975-4b20-ba16-7bf2a3619e26 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Received unexpected event network-vif-plugged-5d9621bb-8ee0-4885-a42e-d68e4c759211 for instance with vm_state building and task_state spawning. [ 755.439321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.440280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.440280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "53a8714c-50f7-4990-a3d9-86f8fc908d03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.440280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.440280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.444037] env[69994]: INFO nova.compute.manager [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Terminating instance [ 755.529775] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 
None None] Instance d4f87534-813e-4ff6-8b1f-ee23cb0b8e80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.586405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.586728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.586984] env[69994]: DEBUG nova.network.neutron [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 755.668553] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241618, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.724977] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ed2baf-6085-548b-edb0-239b1bfc9607, 'name': SearchDatastore_Task, 'duration_secs': 0.023374} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.727186] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2921c2fc-75f4-47bb-bb01-315c03ea9231 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.737256] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 755.737256] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5200116e-29e8-f61d-fde0-19ef3c2d999a" [ 755.737256] env[69994]: _type = "Task" [ 755.737256] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.747570] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5200116e-29e8-f61d-fde0-19ef3c2d999a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.786642] env[69994]: DEBUG nova.compute.manager [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Received event network-vif-deleted-18ed1bc7-f241-4d6e-83f9-4df1b8b70c45 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.786958] env[69994]: DEBUG nova.compute.manager [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Received event network-changed-50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.787272] env[69994]: DEBUG nova.compute.manager [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Refreshing instance network info cache due to event network-changed-50763b02-561b-4c13-8a91-c7e639f09715. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 755.787612] env[69994]: DEBUG oslo_concurrency.lockutils [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] Acquiring lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.787822] env[69994]: DEBUG oslo_concurrency.lockutils [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] Acquired lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.788229] env[69994]: DEBUG nova.network.neutron [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Refreshing network info cache for port 50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.948157] env[69994]: DEBUG nova.compute.manager [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 755.948157] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.948157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f568194-3e9b-4a04-ad68-05d0564f906f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.958764] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.959197] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef0362d3-81f8-4737-95f0-731b8191a8e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.969029] env[69994]: DEBUG oslo_vmware.api [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 755.969029] env[69994]: value = "task-3241619" [ 755.969029] env[69994]: _type = "Task" [ 755.969029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.979282] env[69994]: DEBUG oslo_vmware.api [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241619, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.032470] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 86e514bb-8b47-4605-bd85-55c6c9874320 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.136454] env[69994]: DEBUG nova.network.neutron [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.172997] env[69994]: DEBUG oslo_vmware.api [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241618, 'name': PowerOnVM_Task, 'duration_secs': 0.617359} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.173381] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.175414] env[69994]: INFO nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Took 9.69 seconds to spawn the instance on the hypervisor. [ 756.175414] env[69994]: DEBUG nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.175953] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2116a7d2-7261-4371-bfbe-cc898339b7a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.250104] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5200116e-29e8-f61d-fde0-19ef3c2d999a, 'name': SearchDatastore_Task, 'duration_secs': 0.015648} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.253634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.254051] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 5f672fd4-b96f-4506-aa1e-96692a00cb43/5f672fd4-b96f-4506-aa1e-96692a00cb43.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 756.254473] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09de29ee-9500-42a8-b858-58158ab0ead3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.265613] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 756.265613] env[69994]: value = "task-3241620" [ 756.265613] env[69994]: _type = "Task" [ 756.265613] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.276365] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.294683] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.294683] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.400019] env[69994]: DEBUG nova.network.neutron [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Updating instance_info_cache with network_info: [{"id": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "address": "fa:16:3e:6e:fa:b2", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d9621bb-8e", "ovs_interfaceid": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.481783] env[69994]: DEBUG oslo_vmware.api [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241619, 'name': PowerOffVM_Task, 'duration_secs': 0.276066} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.482130] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.482219] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.482459] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55cdca00-2638-4e6a-8c8c-5f0c1120550e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.537288] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ed662f67-be0e-4f19-bb8a-6af39b4d348c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.563116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.563415] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.563516] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Deleting the datastore file [datastore2] 53a8714c-50f7-4990-a3d9-86f8fc908d03 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.563766] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2ef53e6-b53d-4c9f-898d-a74855ba8c3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.571986] env[69994]: DEBUG oslo_vmware.api [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for the task: (returnval){ [ 756.571986] env[69994]: value = "task-3241622" [ 756.571986] env[69994]: _type = "Task" [ 756.571986] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.580796] env[69994]: DEBUG oslo_vmware.api [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.581677] env[69994]: DEBUG nova.network.neutron [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Updated VIF entry in instance network info cache for port 50763b02-561b-4c13-8a91-c7e639f09715. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.582037] env[69994]: DEBUG nova.network.neutron [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Updating instance_info_cache with network_info: [{"id": "50763b02-561b-4c13-8a91-c7e639f09715", "address": "fa:16:3e:e2:66:27", "network": {"id": "3dc045d7-eb28-46c5-be05-cdcca879d533", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-956262977-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "114596d74d9b40248f385df6e4644aaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50763b02-56", "ovs_interfaceid": "50763b02-561b-4c13-8a91-c7e639f09715", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.705609] env[69994]: INFO nova.compute.manager [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Took 54.04 seconds to build instance. [ 756.777401] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241620, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.908843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.908843] env[69994]: DEBUG nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Instance network_info: |[{"id": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "address": "fa:16:3e:6e:fa:b2", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d9621bb-8e", "ovs_interfaceid": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 756.909182] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:fa:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d9621bb-8ee0-4885-a42e-d68e4c759211', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.917854] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.918489] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.919082] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9661000b-cd69-474f-a1f3-b78d0e700b66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.954288] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.954288] env[69994]: value = "task-3241624" [ 756.954288] env[69994]: _type = "Task" [ 756.954288] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.966858] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241624, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.022542] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528b36bd-ab55-1e1d-a1b1-aed01571b1e7/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 757.023598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e323db-1d99-4770-bcf0-88807719fdfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.032033] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528b36bd-ab55-1e1d-a1b1-aed01571b1e7/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 757.032622] env[69994]: ERROR oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528b36bd-ab55-1e1d-a1b1-aed01571b1e7/disk-0.vmdk due to incomplete transfer. [ 757.032906] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-43624988-ae73-4aa0-808d-867663b9a164 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.045184] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance b99b73e6-3348-4d5d-aa57-f01ace0bfc42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.046890] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528b36bd-ab55-1e1d-a1b1-aed01571b1e7/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 757.047166] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Uploaded image e0bcc3a4-b58a-42ff-a3b7-a158a5974e40 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 757.053253] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 757.053925] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-50bff601-ec46-4ca6-858e-bfaf0ef3ebfb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.064163] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 757.064163] env[69994]: value = "task-3241625" [ 757.064163] env[69994]: _type = "Task" [ 757.064163] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.074392] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241625, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.084299] env[69994]: DEBUG oslo_concurrency.lockutils [req-944822fa-9aaa-49fb-a034-668c10c2a985 req-540e486a-a3cb-4cc5-b163-dda9fea1c776 service nova] Releasing lock "refresh_cache-289cbcc2-cd8f-4c4f-9169-a897f5527de1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.084739] env[69994]: DEBUG oslo_vmware.api [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Task: {'id': task-3241622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445242} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.085039] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 757.085233] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 757.085418] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 757.085655] env[69994]: INFO nova.compute.manager [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Took 1.14 seconds to destroy the instance on the hypervisor. [ 757.085857] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.086312] env[69994]: DEBUG nova.compute.manager [-] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.086393] env[69994]: DEBUG nova.network.neutron [-] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.207795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5dea494f-e56d-4079-98a6-2e9f4e5b7030 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.173s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.276905] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742895} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.277265] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 5f672fd4-b96f-4506-aa1e-96692a00cb43/5f672fd4-b96f-4506-aa1e-96692a00cb43.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 757.277484] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.277750] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7f4e30b-5ca4-4dfe-942f-46693d6cfd83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.286697] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 757.286697] env[69994]: value = "task-3241626" [ 757.286697] env[69994]: _type = "Task" [ 757.286697] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.301633] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.465710] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241624, 'name': CreateVM_Task, 'duration_secs': 0.443741} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.466112] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.468294] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.468464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.468779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 757.469058] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e66c75eb-6a6c-4359-8b28-8c1dc168d169 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.474898] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 757.474898] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ff0066-1707-da2c-d70f-e13174e08b7e" [ 757.474898] env[69994]: _type = "Task" [ 757.474898] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.485336] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ff0066-1707-da2c-d70f-e13174e08b7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.555337] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance f07750f5-3f1d-4d97-98dc-285ed357cc7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.576929] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241625, 'name': Destroy_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.714552] env[69994]: DEBUG nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.797524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.797764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.797957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.798182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.798353] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.799960] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083008} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.800223] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 757.800721] env[69994]: INFO nova.compute.manager [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Terminating instance [ 757.802471] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d087ee3-bfdb-479e-bd17-f584cf8d1544 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.830679] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 5f672fd4-b96f-4506-aa1e-96692a00cb43/5f672fd4-b96f-4506-aa1e-96692a00cb43.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.831278] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a35fa7e-c82a-4919-ae89-ba4999324a35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.853800] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 757.853800] env[69994]: value = "task-3241627" [ 757.853800] env[69994]: _type = "Task" [ 757.853800] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.863867] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241627, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.984333] env[69994]: DEBUG nova.compute.manager [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Received event network-changed-5d9621bb-8ee0-4885-a42e-d68e4c759211 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 757.985182] env[69994]: DEBUG nova.compute.manager [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Refreshing instance network info cache due to event network-changed-5d9621bb-8ee0-4885-a42e-d68e4c759211. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 757.985182] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] Acquiring lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.985182] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] Acquired lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.985182] env[69994]: DEBUG nova.network.neutron [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Refreshing network info cache for port 5d9621bb-8ee0-4885-a42e-d68e4c759211 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.994125] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ff0066-1707-da2c-d70f-e13174e08b7e, 'name': SearchDatastore_Task, 'duration_secs': 0.04065} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.994125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.994125] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.994125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.994296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.994296] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.994296] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34da0d28-3eaa-43fe-beaa-77c2a72b9033 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.014771] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 758.014771] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 758.014771] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0233ebeb-b32b-4a9f-a5bb-7b1e44a6d89a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.023568] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 758.023568] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527baf63-6ba2-37f1-43e7-39e0ffcba849" [ 758.023568] env[69994]: _type = "Task" [ 758.023568] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.033303] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527baf63-6ba2-37f1-43e7-39e0ffcba849, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.058910] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eff21ec5-a51d-4004-9edf-1891f706fe9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.079522] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241625, 'name': Destroy_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.245826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.310701] env[69994]: DEBUG nova.compute.manager [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 758.310968] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 758.311893] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b933cc-abb4-4149-aeca-618c9cd1f6c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.326039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 758.326039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7750d873-739a-4992-b067-d052394a4447 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.335334] env[69994]: DEBUG oslo_vmware.api [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 758.335334] env[69994]: value = "task-3241628" [ 758.335334] env[69994]: _type = "Task" [ 758.335334] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.348024] env[69994]: DEBUG oslo_vmware.api [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241628, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.365416] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241627, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.542772] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527baf63-6ba2-37f1-43e7-39e0ffcba849, 'name': SearchDatastore_Task, 'duration_secs': 0.033533} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.551649] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75312224-4a73-4c97-9d80-cb53b1d417dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.559813] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 758.559813] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b354ec-3271-212d-a10e-c2f3837f8142" [ 758.559813] env[69994]: _type = "Task" [ 758.559813] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.562028] env[69994]: DEBUG nova.network.neutron [-] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.563461] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.583023] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241625, 'name': Destroy_Task, 'duration_secs': 1.217781} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.588164] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Destroyed the VM [ 758.588436] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 758.588718] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b354ec-3271-212d-a10e-c2f3837f8142, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.593028] env[69994]: DEBUG nova.compute.manager [req-ae43b37f-f6f8-474f-ba78-d4fd3890010d req-1b36a2db-66f8-41eb-9b14-95c921863bd7 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Received event network-vif-deleted-be9f669d-36ab-4cbd-a56f-5db33a833aa8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.593028] env[69994]: INFO nova.compute.manager [req-ae43b37f-f6f8-474f-ba78-d4fd3890010d req-1b36a2db-66f8-41eb-9b14-95c921863bd7 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Neutron deleted interface be9f669d-36ab-4cbd-a56f-5db33a833aa8; detaching it from the instance and deleting it from the info cache [ 758.593028] env[69994]: DEBUG nova.network.neutron [req-ae43b37f-f6f8-474f-ba78-d4fd3890010d req-1b36a2db-66f8-41eb-9b14-95c921863bd7 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.593028] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cb5c2bf7-6e2d-4ba2-a215-9f795d622519 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.600037] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 758.600037] env[69994]: value = "task-3241629" [ 758.600037] env[69994]: _type = "Task" [ 758.600037] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.611206] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241629, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.815027] env[69994]: DEBUG nova.network.neutron [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Updated VIF entry in instance network info cache for port 5d9621bb-8ee0-4885-a42e-d68e4c759211. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.815638] env[69994]: DEBUG nova.network.neutron [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Updating instance_info_cache with network_info: [{"id": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "address": "fa:16:3e:6e:fa:b2", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d9621bb-8e", "ovs_interfaceid": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.846900] env[69994]: DEBUG oslo_vmware.api [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241628, 'name': PowerOffVM_Task, 'duration_secs': 0.221121} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.847204] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 758.847375] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 758.847623] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2abbe2a0-a121-4398-8561-e97976e4f31b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.865108] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241627, 'name': ReconfigVM_Task, 'duration_secs': 0.728468} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.865392] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 5f672fd4-b96f-4506-aa1e-96692a00cb43/5f672fd4-b96f-4506-aa1e-96692a00cb43.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.866321] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18ed157a-9dc4-482c-9e36-824b5fd58c43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.873937] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 758.873937] env[69994]: value = "task-3241631" [ 758.873937] env[69994]: _type = "Task" [ 758.873937] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.882766] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241631, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.905828] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 758.906160] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647851', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'name': 'volume-3289f92c-da6a-4c4d-b76c-70d8912e0349', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b003b7c2-e754-440e-8a65-13c5e9c68cd5', 'attached_at': '', 'detached_at': '', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'serial': '3289f92c-da6a-4c4d-b76c-70d8912e0349'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 758.909172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e35c232-7ecd-4239-9b1f-16212549fe7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.933771] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 758.933833] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 758.935036] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Deleting the datastore file [datastore1] 3c2c8a40-919d-4280-b9be-f8d95b1a263e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 758.935036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b446b74f-aedf-4da4-90e0-cc6f5269ef7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.937760] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2898617-3ca1-41a2-9632-0169a4810ecb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.965465] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] volume-3289f92c-da6a-4c4d-b76c-70d8912e0349/volume-3289f92c-da6a-4c4d-b76c-70d8912e0349.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.967470] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task 
with opID=oslo.vmware-7d39b8de-f187-43f3-8e40-13705dc04079 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.981396] env[69994]: DEBUG oslo_vmware.api [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for the task: (returnval){ [ 758.981396] env[69994]: value = "task-3241632" [ 758.981396] env[69994]: _type = "Task" [ 758.981396] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.994359] env[69994]: DEBUG oslo_vmware.api [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.994704] env[69994]: DEBUG oslo_vmware.api [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 758.994704] env[69994]: value = "task-3241633" [ 758.994704] env[69994]: _type = "Task" [ 758.994704] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.004229] env[69994]: DEBUG oslo_vmware.api [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241633, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.066032] env[69994]: INFO nova.compute.manager [-] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Took 1.98 seconds to deallocate network for instance. [ 759.071702] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance dca638aa-c491-431f-a0e5-d02bd76705ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.073122] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b354ec-3271-212d-a10e-c2f3837f8142, 'name': SearchDatastore_Task, 'duration_secs': 0.018841} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.076333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.076609] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/558ee84a-731b-4cb1-967d-cf84c8d39718.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 759.076884] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05c5679d-ccb4-4152-be25-660ccd5dcf1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.087389] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 759.087389] env[69994]: value = "task-3241634" [ 759.087389] env[69994]: _type = "Task" [ 759.087389] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.100447] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.100877] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a6102ad6-5a4b-4afe-93f4-d0a582d14d1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.113563] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241629, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.118857] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eae71ab-f977-4f60-aada-3594caaf1d27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.158774] env[69994]: DEBUG nova.compute.manager [req-ae43b37f-f6f8-474f-ba78-d4fd3890010d req-1b36a2db-66f8-41eb-9b14-95c921863bd7 service nova] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Detach interface failed, port_id=be9f669d-36ab-4cbd-a56f-5db33a833aa8, reason: Instance 53a8714c-50f7-4990-a3d9-86f8fc908d03 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 759.235926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.236285] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.319685] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5a29151-a6d5-434f-b2ed-ac5080a4dabe req-5fa96fb5-db20-44fb-a305-38088427d020 service nova] Releasing lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.387198] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241631, 'name': Rename_Task, 'duration_secs': 0.166319} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.387198] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 759.387198] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c835396-024a-41a6-be2e-b8bc0b8a61ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.395988] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 759.395988] env[69994]: value = "task-3241635" [ 759.395988] env[69994]: _type = "Task" [ 759.395988] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.408599] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241635, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.493339] env[69994]: DEBUG oslo_vmware.api [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Task: {'id': task-3241632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191558} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.495055] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 759.495055] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 759.495055] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.495055] env[69994]: INFO nova.compute.manager [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 759.495055] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 759.495545] env[69994]: DEBUG nova.compute.manager [-] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 759.495545] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.506941] env[69994]: DEBUG oslo_vmware.api [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241633, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.574963] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.581538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.604353] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241634, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.617418] env[69994]: DEBUG oslo_vmware.api [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241629, 'name': RemoveSnapshot_Task, 'duration_secs': 0.526413} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.617715] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 759.617952] env[69994]: INFO nova.compute.manager [None req-a49ba1a4-bf95-4c1a-a41f-be83d5453cbc tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Took 15.12 seconds to snapshot the instance on the hypervisor. [ 759.907671] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241635, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.008296] env[69994]: DEBUG oslo_vmware.api [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241633, 'name': ReconfigVM_Task, 'duration_secs': 0.750998} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.008603] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Reconfigured VM instance instance-0000000c to attach disk [datastore2] volume-3289f92c-da6a-4c4d-b76c-70d8912e0349/volume-3289f92c-da6a-4c4d-b76c-70d8912e0349.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.014995] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b18f860e-3462-409f-a530-23f623afc278 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.035293] env[69994]: DEBUG oslo_vmware.api [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 760.035293] env[69994]: value = "task-3241636" [ 760.035293] env[69994]: _type = "Task" [ 760.035293] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.045953] env[69994]: DEBUG oslo_vmware.api [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241636, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.078409] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 2d812174-d2ad-4fac-8ae5-ffa51d691374 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 760.104268] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241634, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58291} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.104520] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/558ee84a-731b-4cb1-967d-cf84c8d39718.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 760.104734] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 760.104971] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ccea88dd-3bc4-4ebf-b95f-eeb3e43383ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.114482] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 760.114482] env[69994]: value = "task-3241637" [ 760.114482] env[69994]: _type = "Task" [ 760.114482] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.128213] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241637, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.301340] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.413518] env[69994]: DEBUG oslo_vmware.api [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241635, 'name': PowerOnVM_Task, 'duration_secs': 0.539559} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.413807] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 760.414020] env[69994]: INFO nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Took 11.10 seconds to spawn the instance on the hypervisor. 
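The recurring "Waiting for the task ... progress is N% ... completed successfully" entries above (RemoveSnapshot_Task, Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, and so on) all come from the same poll-until-done pattern used when nova waits on vCenter tasks. Below is a minimal, self-contained sketch of that pattern for illustration only; it is not the oslo.vmware implementation, and fetch_progress is a hypothetical callable standing in for the real vCenter task-info read.

    # Illustrative sketch only: simplified poll-until-complete loop mirroring the
    # "progress is N% ... completed successfully" DEBUG entries above.
    import time

    def wait_for_task(fetch_progress, poll_interval=0.5, timeout=300.0):
        """Poll fetch_progress() until it reports 100, then return.

        fetch_progress: callable returning an int progress percentage (0-100).
        Raises TimeoutError if the task does not finish within `timeout` seconds.
        """
        deadline = time.monotonic() + timeout
        while True:
            progress = fetch_progress()
            print("progress is %d%%" % progress)   # analogous to the polling DEBUG lines
            if progress >= 100:
                print("completed successfully")
                return
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in %.0fs" % timeout)
            time.sleep(poll_interval)

    # Example usage with a stubbed task that finishes on the third poll:
    if __name__ == "__main__":
        samples = iter([0, 51, 100])
        wait_for_task(lambda: next(samples), poll_interval=0.01)

In the log itself the same loop shape is visible as a sequence of _poll_task entries ending in a "completed successfully" line with a duration_secs value.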
[ 760.414208] env[69994]: DEBUG nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 760.415048] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c3cafb-1634-4591-98a7-b220a454a7d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.545280] env[69994]: DEBUG oslo_vmware.api [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241636, 'name': ReconfigVM_Task, 'duration_secs': 0.154993} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.545596] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647851', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'name': 'volume-3289f92c-da6a-4c4d-b76c-70d8912e0349', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b003b7c2-e754-440e-8a65-13c5e9c68cd5', 'attached_at': '', 'detached_at': '', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'serial': '3289f92c-da6a-4c4d-b76c-70d8912e0349'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 760.581810] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 566522b0-7aa7-4552-9be7-035d742ba394 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 760.614917] env[69994]: DEBUG nova.compute.manager [req-a85ce741-9f9f-4b16-9697-a87d1ffcb648 req-0e6f68cc-60be-4150-a1a4-0e0401ff1cee service nova] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Received event network-vif-deleted-b5372a7b-7de1-4258-959a-ce83afa3070b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 760.625159] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241637, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069034} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.627186] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.627186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa724648-2b98-4449-9373-a7da3c7ac276 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.649758] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/558ee84a-731b-4cb1-967d-cf84c8d39718.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.650073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70c0bc55-9cfe-4917-b097-949c7afdacf0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.670968] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 760.670968] env[69994]: value = "task-3241638" [ 760.670968] env[69994]: _type = "Task" [ 760.670968] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.679718] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.804061] env[69994]: INFO nova.compute.manager [-] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Took 1.31 seconds to deallocate network for instance. [ 760.936740] env[69994]: INFO nova.compute.manager [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Took 54.87 seconds to build instance. [ 761.090202] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance a4544bc9-6935-4825-9b45-2054d2ced330 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.090202] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 761.090202] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 761.183099] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241638, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.310069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.439149] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7953c5a7-1e3b-49e9-b26b-47a08007079f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.483s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.606728] env[69994]: DEBUG nova.objects.instance [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lazy-loading 'flavor' on Instance uuid b003b7c2-e754-440e-8a65-13c5e9c68cd5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.663604] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d892784-4bb2-4edc-93ee-4c7d799a9708 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.675022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96dc9e7e-ad4b-4040-81d3-ced0b086161f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.683465] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241638, 'name': ReconfigVM_Task, 'duration_secs': 0.778057} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.707592] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/558ee84a-731b-4cb1-967d-cf84c8d39718.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.708741] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0530d257-d822-485e-a808-ff4e31c3174d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.711386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d403b0-8fb1-497c-a0fc-f1b9b3dc35a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.715594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "5f672fd4-b96f-4506-aa1e-96692a00cb43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.715818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.716058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "5f672fd4-b96f-4506-aa1e-96692a00cb43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.716266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.716431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.719903] env[69994]: INFO nova.compute.manager [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 
tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Terminating instance [ 761.727891] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b31b7527-2207-4435-8cba-cb26ebcd41bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.733860] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 761.733860] env[69994]: value = "task-3241639" [ 761.733860] env[69994]: _type = "Task" [ 761.733860] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.749988] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.754820] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241639, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.948269] env[69994]: DEBUG nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 762.113024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f61c164b-b3ce-4135-99cf-7a27e9af1b83 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.862s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.223448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.223714] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.225440] env[69994]: DEBUG nova.compute.manager [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 762.225639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.226691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ed456e-c372-4e13-bb49-3a8dd7ca64b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.235716] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.240192] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31838a53-6f5c-4802-a07d-c774552b8e54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.253144] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241639, 'name': Rename_Task, 'duration_secs': 0.183676} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.254397] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 762.254707] env[69994]: DEBUG oslo_vmware.api [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 762.254707] env[69994]: value = "task-3241640" [ 762.254707] env[69994]: _type = "Task" [ 762.254707] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.255386] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0162fd66-9be7-4ee5-a641-4bcef2eeaf7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.257475] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.276135] env[69994]: DEBUG oslo_vmware.api [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.276135] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 762.276135] env[69994]: value = "task-3241641" [ 762.276135] env[69994]: _type = "Task" [ 762.276135] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.284798] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241641, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.475710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.727681] env[69994]: INFO nova.compute.manager [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Detaching volume 3289f92c-da6a-4c4d-b76c-70d8912e0349 [ 762.747638] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "153f0ead-6e2f-4077-b86a-00d3a1114fed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.749948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.749948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "153f0ead-6e2f-4077-b86a-00d3a1114fed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.749948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.749948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.752279] env[69994]: INFO nova.compute.manager [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Terminating instance [ 762.768700] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record 
updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 762.769023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.348s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.769453] env[69994]: DEBUG oslo_vmware.api [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241640, 'name': PowerOffVM_Task, 'duration_secs': 0.218993} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.770409] env[69994]: INFO nova.virt.block_device [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Attempting to driver detach volume 3289f92c-da6a-4c4d-b76c-70d8912e0349 from mountpoint /dev/sdb [ 762.770716] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 762.770801] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647851', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'name': 'volume-3289f92c-da6a-4c4d-b76c-70d8912e0349', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b003b7c2-e754-440e-8a65-13c5e9c68cd5', 'attached_at': '', 'detached_at': '', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'serial': '3289f92c-da6a-4c4d-b76c-70d8912e0349'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 762.771367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.277s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.771635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.774056] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 
tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.914s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.775624] env[69994]: INFO nova.compute.claims [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.778232] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.778407] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.779184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c7a830-fde0-43b5-a6d9-9a26d2bfd187 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.782602] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fabbabf9-e4af-4a74-ab9b-754b3babc09d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.793663] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241641, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.813583] env[69994]: INFO nova.scheduler.client.report [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Deleted allocations for instance aeb7928a-8307-49e7-b019-a4c674e6369a [ 762.815215] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115ad7d6-ef1f-426e-9364-fec8606a37ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.826527] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8415d47-68b8-4da4-844e-b9479e43deed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.852963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cdd15b-6321-4b20-b655-d2d04ab40e7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.859384] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.859748] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.859958] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleting the datastore file [datastore1] 5f672fd4-b96f-4506-aa1e-96692a00cb43 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.860568] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59c1deb4-8ee3-4683-9d10-8b09176fcca8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.875065] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] The volume has not been displaced from its original location: [datastore2] volume-3289f92c-da6a-4c4d-b76c-70d8912e0349/volume-3289f92c-da6a-4c4d-b76c-70d8912e0349.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 762.880596] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Reconfiguring VM instance instance-0000000c to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 762.881709] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bca76531-cb47-4612-9b9e-f91ea728f1fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.896333] env[69994]: DEBUG oslo_vmware.api [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 762.896333] env[69994]: value = "task-3241643" [ 762.896333] env[69994]: _type = "Task" [ 762.896333] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.901799] env[69994]: DEBUG oslo_vmware.api [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 762.901799] env[69994]: value = "task-3241644" [ 762.901799] env[69994]: _type = "Task" [ 762.901799] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.905172] env[69994]: DEBUG oslo_vmware.api [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.913580] env[69994]: DEBUG oslo_vmware.api [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241644, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.255349] env[69994]: DEBUG nova.compute.manager [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 763.255823] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 763.256519] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c8a404-5837-45bc-b2c6-406b44315a65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.265137] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 763.265394] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2c5c7d6-c058-4446-9592-d213941db062 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.272485] env[69994]: DEBUG oslo_vmware.api [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 763.272485] env[69994]: value = "task-3241645" [ 763.272485] env[69994]: _type = "Task" [ 763.272485] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.281880] env[69994]: DEBUG oslo_vmware.api [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.292896] env[69994]: DEBUG oslo_vmware.api [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241641, 'name': PowerOnVM_Task, 'duration_secs': 0.852999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.293788] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 763.294010] env[69994]: INFO nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Took 9.34 seconds to spawn the instance on the hypervisor. 
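[editor's note] The entries above repeat one pattern: a vCenter task (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) is invoked, then "Waiting for the task ... progress is N% ... completed successfully" lines appear as the task is polled. The following is a minimal Python sketch of that polling loop, not the actual oslo.vmware implementation; the session.get_task_info() helper, the task-info attributes and POLL_INTERVAL are illustrative assumptions.

    # Simplified sketch of the task-polling pattern seen in the log above.
    # NOT the real oslo.vmware code; get_task_info() and its return fields
    # (state, progress, error, result) are hypothetical stand-ins.
    import time

    POLL_INTERVAL = 0.5  # seconds between polls; configurable in the real driver


    class TaskFailed(Exception):
        pass


    def wait_for_task(session, task_ref):
        """Poll a vCenter task reference until it finishes, mirroring the
        'progress is N% ... completed successfully' entries."""
        start = time.monotonic()
        while True:
            info = session.get_task_info(task_ref)   # hypothetical helper
            if info.state == "running":
                print(f"Task: {task_ref} progress is {info.progress}%.")
                time.sleep(POLL_INTERVAL)
                continue
            duration = time.monotonic() - start
            if info.state == "success":
                print(f"Task: {task_ref} completed successfully "
                      f"(duration_secs={duration:.6f}).")
                return info.result
            raise TaskFailed(f"Task {task_ref} failed: {info.error}")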
[ 763.294203] env[69994]: DEBUG nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 763.294992] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e24a26-edc9-4900-80ca-38bbec450b89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.324915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8ad0c885-f0ba-4c47-96b6-ffd99ef4815d tempest-ImagesOneServerTestJSON-300708171 tempest-ImagesOneServerTestJSON-300708171-project-member] Lock "aeb7928a-8307-49e7-b019-a4c674e6369a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.491s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.407432] env[69994]: DEBUG oslo_vmware.api [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185231} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.410908] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.411153] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.411552] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.411800] env[69994]: INFO nova.compute.manager [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Took 1.19 seconds to destroy the instance on the hypervisor. [ 763.412117] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.412849] env[69994]: DEBUG nova.compute.manager [-] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 763.412972] env[69994]: DEBUG nova.network.neutron [-] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.421271] env[69994]: DEBUG oslo_vmware.api [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241644, 'name': ReconfigVM_Task, 'duration_secs': 0.247774} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.423069] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Reconfigured VM instance instance-0000000c to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 763.427464] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e96a7d2-90b2-4390-82ee-d21f2ecdd81f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.444577] env[69994]: DEBUG oslo_vmware.api [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 763.444577] env[69994]: value = "task-3241646" [ 763.444577] env[69994]: _type = "Task" [ 763.444577] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.454679] env[69994]: DEBUG oslo_vmware.api [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241646, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.783243] env[69994]: DEBUG oslo_vmware.api [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241645, 'name': PowerOffVM_Task, 'duration_secs': 0.207542} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.783517] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 763.783664] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 763.785107] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d63c73c0-e1bd-4a97-b617-35138944f31f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.817808] env[69994]: INFO nova.compute.manager [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Took 52.87 seconds to build instance. [ 763.857568] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 763.857568] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 763.857568] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Deleting the datastore file [datastore1] 153f0ead-6e2f-4077-b86a-00d3a1114fed {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 763.858697] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32bd07f7-56c7-4586-b5e5-9946723515f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.863183] env[69994]: DEBUG nova.compute.manager [req-2757dfc4-53af-4de6-bee5-eeb56181b232 req-1c442280-341b-4c01-82cd-211bc4d3905c service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Received event network-vif-deleted-42c8060b-e0e2-4cd5-acdb-812565e57ee8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 763.863551] env[69994]: INFO nova.compute.manager [req-2757dfc4-53af-4de6-bee5-eeb56181b232 req-1c442280-341b-4c01-82cd-211bc4d3905c service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Neutron deleted interface 42c8060b-e0e2-4cd5-acdb-812565e57ee8; detaching it from the instance and deleting it from the info cache [ 763.863855] env[69994]: DEBUG nova.network.neutron [req-2757dfc4-53af-4de6-bee5-eeb56181b232 
req-1c442280-341b-4c01-82cd-211bc4d3905c service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.875023] env[69994]: DEBUG oslo_vmware.api [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 763.875023] env[69994]: value = "task-3241648" [ 763.875023] env[69994]: _type = "Task" [ 763.875023] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.882443] env[69994]: DEBUG oslo_vmware.api [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241648, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.962279] env[69994]: DEBUG oslo_vmware.api [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241646, 'name': ReconfigVM_Task, 'duration_secs': 0.144788} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.962279] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647851', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'name': 'volume-3289f92c-da6a-4c4d-b76c-70d8912e0349', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b003b7c2-e754-440e-8a65-13c5e9c68cd5', 'attached_at': '', 'detached_at': '', 'volume_id': '3289f92c-da6a-4c4d-b76c-70d8912e0349', 'serial': '3289f92c-da6a-4c4d-b76c-70d8912e0349'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 764.324353] env[69994]: DEBUG oslo_concurrency.lockutils [None req-436c834f-ee98-4db5-a194-ea45e8261819 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.368s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.337364] env[69994]: DEBUG nova.network.neutron [-] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.371141] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-feb7d3ad-ad11-4e1b-9899-ba7b12df63af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.386470] env[69994]: DEBUG oslo_vmware.api [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 
tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14364} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.390666] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 764.390865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 764.391071] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 764.391361] env[69994]: INFO nova.compute.manager [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Took 1.14 seconds to destroy the instance on the hypervisor. [ 764.391663] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.392928] env[69994]: DEBUG nova.compute.manager [-] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 764.392928] env[69994]: DEBUG nova.network.neutron [-] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 764.396417] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32306950-8d3d-48d6-97c8-0e2964295b6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.442248] env[69994]: DEBUG nova.compute.manager [req-2757dfc4-53af-4de6-bee5-eeb56181b232 req-1c442280-341b-4c01-82cd-211bc4d3905c service nova] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Detach interface failed, port_id=42c8060b-e0e2-4cd5-acdb-812565e57ee8, reason: Instance 5f672fd4-b96f-4506-aa1e-96692a00cb43 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 764.461553] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541c67ad-f0ad-47e9-965a-f078319ef4d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.472327] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d787bc-912a-44a1-aec7-484adb9ef785 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.531863] env[69994]: DEBUG nova.objects.instance [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lazy-loading 'flavor' on Instance uuid b003b7c2-e754-440e-8a65-13c5e9c68cd5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.536168] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf6219a-57ca-4888-bd64-0df2d18edd33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.549028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8c2665-997a-4031-8ddb-aa52a8c68f0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.570456] env[69994]: DEBUG nova.compute.provider_tree [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.643965] env[69994]: INFO nova.compute.manager [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Rescuing [ 764.644426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.644615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.644790] env[69994]: DEBUG nova.network.neutron [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.827804] env[69994]: DEBUG nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] 
Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.840585] env[69994]: INFO nova.compute.manager [-] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Took 1.43 seconds to deallocate network for instance. [ 765.073264] env[69994]: DEBUG nova.scheduler.client.report [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.198617] env[69994]: DEBUG nova.network.neutron [-] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.348554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.356834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.465490] env[69994]: DEBUG nova.network.neutron [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Updating instance_info_cache with network_info: [{"id": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "address": "fa:16:3e:6e:fa:b2", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d9621bb-8e", "ovs_interfaceid": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.546063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af6875c5-a710-445b-b5a4-23c66d2d9169 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.322s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.579026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.805s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.579026] env[69994]: DEBUG nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 765.581441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.298s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.582851] env[69994]: INFO nova.compute.claims [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.702346] env[69994]: INFO nova.compute.manager [-] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Took 1.31 seconds to deallocate network for instance. 
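[editor's note] The 'Lock "compute_resources" acquired ... waited Ns' and '"released" ... held Ns' entries above come from a timing wrapper around a named lock. Below is a self-contained approximation of that accounting, assuming plain threading primitives rather than the real oslo.concurrency lockutils code; the timed_lock() helper name is made up for illustration.

    # Standalone approximation (an assumption, not oslo.concurrency itself) of
    # the waited/held accounting printed around the "compute_resources" lock.
    import contextlib
    import threading
    import time

    _locks = {}                      # name -> threading.Lock
    _registry_guard = threading.Lock()


    @contextlib.contextmanager
    def timed_lock(name, by):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_from
            print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')


    # Usage mirroring the resource-tracker entries in the log:
    # with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...claim CPU/RAM/disk for the new instance...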
[ 765.883262] env[69994]: DEBUG nova.compute.manager [req-8806ea5b-70c8-45fd-97f6-2a5f367a4513 req-b9a278dc-c878-475f-a1b0-4fc85c273b30 service nova] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Received event network-vif-deleted-825d8824-77bf-4808-8066-32caf413dbc7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.969737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.087701] env[69994]: DEBUG nova.compute.utils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 766.091058] env[69994]: DEBUG nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 766.091292] env[69994]: DEBUG nova.network.neutron [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.147250] env[69994]: DEBUG nova.policy [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '849a45b1c7954562891aa0af98bcd508', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7919e01669a04af68d70ddff8fea2cd3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 766.209986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.593809] env[69994]: DEBUG nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 766.616655] env[69994]: DEBUG nova.network.neutron [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Successfully created port: 064c1f51-da25-4b26-a357-69f406a06504 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.156988] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d98a9ce-6490-480c-ac26-9e094c5fc0d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.166780] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5856d6f7-f019-442d-b8de-0c9fe9d03e6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.197898] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb8ee27-a671-4664-bb77-bd57990ee2de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.206659] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3663ddac-a113-4090-ae33-33f53bc89877 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.222185] env[69994]: DEBUG nova.compute.provider_tree [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.285293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.285667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.509501] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 767.509807] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cd95a89-d6a3-41a2-9667-db4aeb510ed6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.518286] env[69994]: DEBUG 
oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 767.518286] env[69994]: value = "task-3241649" [ 767.518286] env[69994]: _type = "Task" [ 767.518286] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.527038] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241649, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.612371] env[69994]: DEBUG nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.656451] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.656711] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.656869] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.657067] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.657219] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.657368] env[69994]: DEBUG nova.virt.hardware [None 
req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.657576] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.657734] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.657900] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.658198] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.658459] env[69994]: DEBUG nova.virt.hardware [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.659378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65874a10-a9a9-4c47-b7ac-ef9aaf96ef3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.669829] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733c83ec-d5a3-49af-8848-e064407f2662 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.726029] env[69994]: DEBUG nova.scheduler.client.report [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 768.028414] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 
tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241649, 'name': PowerOffVM_Task, 'duration_secs': 0.295962} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.028702] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 768.029640] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c974f521-0e61-48a9-9dc4-2ef83c5b1dd0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.049876] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edec23c6-b37d-412d-ad13-52428f21f908 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.078980] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 768.079449] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1ee9b46-48ad-483e-b618-60e12427c4bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.088500] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 768.088500] env[69994]: value = "task-3241650" [ 768.088500] env[69994]: _type = "Task" [ 768.088500] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.097331] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241650, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.231953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.232514] env[69994]: DEBUG nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 768.235424] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.513s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.236829] env[69994]: INFO nova.compute.claims [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.254722] env[69994]: DEBUG nova.compute.manager [req-f6c67169-3cde-44b4-83b9-0ac81bd970d1 req-acdb3b7b-1abf-4dd5-975a-a12c84fb5403 service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Received event network-vif-plugged-064c1f51-da25-4b26-a357-69f406a06504 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.254938] env[69994]: DEBUG oslo_concurrency.lockutils [req-f6c67169-3cde-44b4-83b9-0ac81bd970d1 req-acdb3b7b-1abf-4dd5-975a-a12c84fb5403 service nova] Acquiring lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.255161] env[69994]: DEBUG oslo_concurrency.lockutils [req-f6c67169-3cde-44b4-83b9-0ac81bd970d1 req-acdb3b7b-1abf-4dd5-975a-a12c84fb5403 service nova] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.255332] env[69994]: DEBUG oslo_concurrency.lockutils [req-f6c67169-3cde-44b4-83b9-0ac81bd970d1 req-acdb3b7b-1abf-4dd5-975a-a12c84fb5403 service nova] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.255496] env[69994]: DEBUG nova.compute.manager [req-f6c67169-3cde-44b4-83b9-0ac81bd970d1 req-acdb3b7b-1abf-4dd5-975a-a12c84fb5403 service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] No waiting events found dispatching network-vif-plugged-064c1f51-da25-4b26-a357-69f406a06504 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.255658] env[69994]: WARNING nova.compute.manager [req-f6c67169-3cde-44b4-83b9-0ac81bd970d1 req-acdb3b7b-1abf-4dd5-975a-a12c84fb5403 service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Received unexpected event network-vif-plugged-064c1f51-da25-4b26-a357-69f406a06504 for instance with vm_state building and task_state spawning. 
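[editor's note] The "No waiting events found dispatching network-vif-plugged-..." and the WARNING about an unexpected event above occur because Neutron's notification arrived before the compute manager had registered a waiter for it. A rough sketch of that handshake follows, assuming a simplified registry keyed by instance UUID and event name; this is not Nova's actual InstanceEvents class.

    # Illustrative sketch (assumption, simplified) of the external-event
    # handshake: the manager only blocks on events it prepared in advance;
    # an event that arrives first is reported as unexpected, as in the log.
    import threading
    from collections import defaultdict


    class InstanceEvents:
        def __init__(self):
            # instance_uuid -> {event_name: threading.Event}
            self._waiters = defaultdict(dict)
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            """Register interest before starting the operation (e.g. port binding)."""
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def dispatch(self, instance_uuid, event_name):
            """Called when the external event (from Neutron) is received."""
            with self._lock:
                waiter = self._waiters[instance_uuid].pop(event_name, None)
            if waiter is None:
                print(f"No waiting events found dispatching {event_name}; "
                      f"received unexpected event for instance {instance_uuid}")
            else:
                waiter.set()   # wakes the thread blocked in ev.wait()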
[ 768.356372] env[69994]: DEBUG nova.network.neutron [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Successfully updated port: 064c1f51-da25-4b26-a357-69f406a06504 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 768.602323] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 768.602521] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.602807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.602894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.603092] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.603363] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d7a510b-d562-487b-a1e6-2bf853926bb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.618544] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.622022] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.622022] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc77b5a-fa38-4ee2-993b-b03a03cb50bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.625666] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 768.625666] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c93a13-54c5-74d5-c9e1-d91186bab4b3" [ 768.625666] env[69994]: _type = "Task" [ 768.625666] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.634211] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c93a13-54c5-74d5-c9e1-d91186bab4b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.741995] env[69994]: DEBUG nova.compute.utils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.745450] env[69994]: DEBUG nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 768.745615] env[69994]: DEBUG nova.network.neutron [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.788839] env[69994]: DEBUG nova.policy [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '849a45b1c7954562891aa0af98bcd508', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7919e01669a04af68d70ddff8fea2cd3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 768.862250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.862250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.862395] env[69994]: DEBUG nova.network.neutron [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.136476] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c93a13-54c5-74d5-c9e1-d91186bab4b3, 'name': SearchDatastore_Task, 'duration_secs': 0.013616} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.137590] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f69dcc0c-7c19-453c-ad54-0af1f11590dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.144562] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 769.144562] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521325af-b6da-432f-5d63-3147b11ee00a" [ 769.144562] env[69994]: _type = "Task" [ 769.144562] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.154774] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521325af-b6da-432f-5d63-3147b11ee00a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.166385] env[69994]: DEBUG nova.network.neutron [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Successfully created port: aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.246274] env[69994]: DEBUG nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 769.402178] env[69994]: DEBUG nova.network.neutron [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.582277] env[69994]: DEBUG nova.network.neutron [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Updating instance_info_cache with network_info: [{"id": "064c1f51-da25-4b26-a357-69f406a06504", "address": "fa:16:3e:e9:4b:41", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap064c1f51-da", "ovs_interfaceid": "064c1f51-da25-4b26-a357-69f406a06504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.659141] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521325af-b6da-432f-5d63-3147b11ee00a, 
'name': SearchDatastore_Task, 'duration_secs': 0.04262} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.659141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.659141] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 769.659141] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fc7c1a5-2057-4334-8c84-58bb609cffd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.672217] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 769.672217] env[69994]: value = "task-3241651" [ 769.672217] env[69994]: _type = "Task" [ 769.672217] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.692528] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241651, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.856384] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36998cff-5937-4c95-acb4-0ca6442f6687 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.866841] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63ff8b0-1fab-48ae-8fb7-ba0a909b8ebf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.913670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4c0fa7-fe11-4d55-8193-e2fb3bdd02b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.925677] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0c8696-d341-4f37-9cd2-71dc31ae768a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.949905] env[69994]: DEBUG nova.compute.provider_tree [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.085167] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.085511] env[69994]: DEBUG nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Instance network_info: |[{"id": "064c1f51-da25-4b26-a357-69f406a06504", "address": "fa:16:3e:e9:4b:41", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap064c1f51-da", "ovs_interfaceid": "064c1f51-da25-4b26-a357-69f406a06504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 770.085938] env[69994]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:4b:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '064c1f51-da25-4b26-a357-69f406a06504', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 770.100458] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Creating folder: Project (7919e01669a04af68d70ddff8fea2cd3). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 770.101352] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-046ceecb-8d14-4760-b052-28fff0e2d988 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.116727] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Created folder: Project (7919e01669a04af68d70ddff8fea2cd3) in parent group-v647729. [ 770.116939] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Creating folder: Instances. Parent ref: group-v647853. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 770.117370] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2feeae7-00e3-4500-898c-94d8a00cdd85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.131456] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Created folder: Instances in parent group-v647853. [ 770.131727] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 770.132081] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 770.132208] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e8359fd-b521-46cb-82e3-149ac2d51ae4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.157141] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 770.157141] env[69994]: value = "task-3241654" [ 770.157141] env[69994]: _type = "Task" [ 770.157141] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.172369] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241654, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.185320] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241651, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.260549] env[69994]: DEBUG nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 770.283380] env[69994]: DEBUG nova.compute.manager [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Received event network-changed-064c1f51-da25-4b26-a357-69f406a06504 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.283486] env[69994]: DEBUG nova.compute.manager [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Refreshing instance network info cache due to event network-changed-064c1f51-da25-4b26-a357-69f406a06504. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 770.283772] env[69994]: DEBUG oslo_concurrency.lockutils [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] Acquiring lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.283926] env[69994]: DEBUG oslo_concurrency.lockutils [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] Acquired lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.284098] env[69994]: DEBUG nova.network.neutron [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Refreshing network info cache for port 064c1f51-da25-4b26-a357-69f406a06504 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 770.287689] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 770.287689] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.287689] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 770.287918] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.287918] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 770.287918] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 770.288171] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 770.288341] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 770.288511] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 770.288673] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 
770.288843] env[69994]: DEBUG nova.virt.hardware [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 770.289972] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca0cbb5-51fc-4cce-bfd6-f574fda5afe6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.299797] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958df012-bf1d-427b-a7f4-b984190c7ad8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.453447] env[69994]: DEBUG nova.scheduler.client.report [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 770.668480] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241654, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.685891] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520345} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.686179] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. 
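The SearchDatastore_Task and CopyVirtualDisk_Task entries above follow oslo.vmware's standard pattern: issue the SOAP call, receive a task reference, then poll it until completion ("progress is 0% ... 89% ... completed successfully"). A minimal sketch of that pattern follows; the vCenter address, credentials, and datastore paths are placeholders, while VMwareAPISession, invoke_api, and wait_for_task are the real oslo.vmware entry points.

```python
# Minimal sketch of the oslo.vmware task-polling pattern behind the
# CopyVirtualDisk_Task entries above. Host, credentials and datastore
# paths are placeholders, not values from this deployment.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'svc-user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# invoke_api() sends the SOAP request (here CopyVirtualDisk_Task) and
# returns a task reference; wait_for_task() polls it, producing log lines
# like "Task: {'id': task-..., 'name': CopyVirtualDisk_Task} progress is 0%."
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore2] devstack-image-cache_base/example.vmdk',
    destName='[datastore2] example-instance/example-rescue.vmdk')
task_info = session.wait_for_task(task)
```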
[ 770.687100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e68757-434d-4dad-b576-098b01e2ce5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.716086] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.716425] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a461fff-0b4e-49de-ac4d-483f4b6a7f34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.735262] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 770.735262] env[69994]: value = "task-3241655" [ 770.735262] env[69994]: _type = "Task" [ 770.735262] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.757445] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241655, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.960862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.725s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.961424] env[69994]: DEBUG nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 770.963995] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.345s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.964209] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.966201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.343s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.966405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.968053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.426s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.971186] env[69994]: INFO nova.compute.claims [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.010318] env[69994]: DEBUG nova.network.neutron [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Updated VIF entry in instance network info cache for port 064c1f51-da25-4b26-a357-69f406a06504. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 771.011221] env[69994]: DEBUG nova.network.neutron [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Updating instance_info_cache with network_info: [{"id": "064c1f51-da25-4b26-a357-69f406a06504", "address": "fa:16:3e:e9:4b:41", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap064c1f51-da", "ovs_interfaceid": "064c1f51-da25-4b26-a357-69f406a06504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.013259] env[69994]: INFO nova.scheduler.client.report [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted allocations for instance e8b4640f-302d-43cd-a654-c42f9cb34766 [ 771.015513] env[69994]: INFO nova.scheduler.client.report [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted allocations for instance 493c2d85-eef5-44ae-acfc-2744685135ca [ 771.168712] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241654, 'name': CreateVM_Task, 'duration_secs': 0.614494} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.168712] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 771.169416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.169655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.169987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 771.170265] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da3de4c5-3706-4b22-b8c1-494d9a0d9ba5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.175790] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 771.175790] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5292eb11-409e-2dcf-b745-64c82285bdc8" [ 771.175790] env[69994]: _type = "Task" [ 771.175790] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.183631] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5292eb11-409e-2dcf-b745-64c82285bdc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.240837] env[69994]: DEBUG nova.network.neutron [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Successfully updated port: aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 771.248049] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241655, 'name': ReconfigVM_Task, 'duration_secs': 0.366391} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.248385] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.249254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8f5464-24f1-4962-af58-d210b62915bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.278354] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9fc8a3d-f230-4cf9-9d49-a94e77f47f3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.300158] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 771.300158] env[69994]: value = "task-3241656" [ 771.300158] env[69994]: _type = "Task" [ 771.300158] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.309572] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241656, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.478166] env[69994]: DEBUG nova.compute.utils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 771.479141] env[69994]: DEBUG nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 771.479355] env[69994]: DEBUG nova.network.neutron [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.519793] env[69994]: DEBUG oslo_concurrency.lockutils [req-6f66e96c-2ea6-4a9f-8c61-7ad5cc548427 req-7882b54b-a6ac-456c-9d6a-05753dc30aee service nova] Releasing lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.524218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-46f5be88-6d55-419b-9741-c02dd8a3f5df tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "493c2d85-eef5-44ae-acfc-2744685135ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.200s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.526732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3641fa74-6625-485f-a2e9-de9667c71d26 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "e8b4640f-302d-43cd-a654-c42f9cb34766" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.409s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.529361] env[69994]: DEBUG nova.policy [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bdb2acbbdc04bec8c21d69912b45f43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97b5a4565fa644a4a510beb5ba006afb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 771.688476] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5292eb11-409e-2dcf-b745-64c82285bdc8, 'name': SearchDatastore_Task, 'duration_secs': 0.01168} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.688813] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.689020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.689282] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.689443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.689624] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.689882] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e87f8e31-760f-4a9c-8a04-642b370c1ca8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.699754] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.699928] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.700694] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e5166fd-2193-4ce8-92ab-99441dcde3f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.707667] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 771.707667] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520aa561-e9cc-abe2-751d-ea6c64c083a3" [ 771.707667] env[69994]: _type = "Task" [ 771.707667] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.716295] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520aa561-e9cc-abe2-751d-ea6c64c083a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.743443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "refresh_cache-214b3508-6fb9-455e-be6b-bd9f6902b7ae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.743610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "refresh_cache-214b3508-6fb9-455e-be6b-bd9f6902b7ae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.743740] env[69994]: DEBUG nova.network.neutron [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.814318] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241656, 'name': ReconfigVM_Task, 'duration_secs': 0.171518} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.814318] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.814318] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d50dd98f-8c21-4886-a9a4-94de626aa8c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.823417] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 771.823417] env[69994]: value = "task-3241657" [ 771.823417] env[69994]: _type = "Task" [ 771.823417] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.834725] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241657, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.964817] env[69994]: DEBUG nova.network.neutron [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Successfully created port: de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.983325] env[69994]: DEBUG nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.218813] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520aa561-e9cc-abe2-751d-ea6c64c083a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009905} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.219803] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07edfc13-27ef-4c04-a0bf-ac20fe026b1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.228953] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 772.228953] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521f8fd1-bb37-e69e-d374-7980d9f1fe79" [ 772.228953] env[69994]: _type = "Task" [ 772.228953] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.240481] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f8fd1-bb37-e69e-d374-7980d9f1fe79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.276771] env[69994]: DEBUG nova.network.neutron [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.336733] env[69994]: DEBUG oslo_vmware.api [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241657, 'name': PowerOnVM_Task, 'duration_secs': 0.40562} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.337095] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.340489] env[69994]: DEBUG nova.compute.manager [None req-9947d097-2337-4759-80fe-5d282ecc14c0 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.341792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2241c4f9-7f37-4516-82a9-135710f40f95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.433377] env[69994]: DEBUG nova.network.neutron [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Updating instance_info_cache with network_info: [{"id": "aa1c3cb9-5c3d-4700-af3b-94fbe3952be2", "address": "fa:16:3e:b9:96:a2", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1c3cb9-5c", 
"ovs_interfaceid": "aa1c3cb9-5c3d-4700-af3b-94fbe3952be2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.544900] env[69994]: DEBUG nova.compute.manager [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Received event network-vif-plugged-aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.544900] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] Acquiring lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.545033] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.545093] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.545261] env[69994]: DEBUG nova.compute.manager [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] No waiting events found dispatching network-vif-plugged-aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 772.545416] env[69994]: WARNING nova.compute.manager [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Received unexpected event network-vif-plugged-aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 for instance with vm_state building and task_state spawning. [ 772.545576] env[69994]: DEBUG nova.compute.manager [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Received event network-changed-aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.545710] env[69994]: DEBUG nova.compute.manager [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Refreshing instance network info cache due to event network-changed-aa1c3cb9-5c3d-4700-af3b-94fbe3952be2. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 772.545867] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] Acquiring lock "refresh_cache-214b3508-6fb9-455e-be6b-bd9f6902b7ae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.610942] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f5b570-9038-41e8-9a4c-d7c5f72595e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.620592] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c04bd0-bbdb-47dc-8cd8-ae8a8af62176 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.656378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1022a38-8a63-4f31-8e03-30e611bca0a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.664666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2e21a2-b704-4f70-9d9c-3aa82df3c048 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.680129] env[69994]: DEBUG nova.compute.provider_tree [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.739930] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f8fd1-bb37-e69e-d374-7980d9f1fe79, 'name': SearchDatastore_Task, 'duration_secs': 0.010717} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.740229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.740495] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 4dbf53e0-caa1-41f4-8376-dfba8d8567cd/4dbf53e0-caa1-41f4-8376-dfba8d8567cd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 772.740771] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d098b0e1-4256-4454-82bb-fafba016adb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.756464] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 772.756464] env[69994]: value = "task-3241658" [ 772.756464] env[69994]: _type = "Task" [ 772.756464] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.764502] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241658, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.936261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "refresh_cache-214b3508-6fb9-455e-be6b-bd9f6902b7ae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.936621] env[69994]: DEBUG nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Instance network_info: |[{"id": "aa1c3cb9-5c3d-4700-af3b-94fbe3952be2", "address": "fa:16:3e:b9:96:a2", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1c3cb9-5c", "ovs_interfaceid": "aa1c3cb9-5c3d-4700-af3b-94fbe3952be2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.936941] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] Acquired lock "refresh_cache-214b3508-6fb9-455e-be6b-bd9f6902b7ae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.937167] env[69994]: DEBUG nova.network.neutron [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Refreshing network info cache for port aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.938641] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:96:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa1c3cb9-5c3d-4700-af3b-94fbe3952be2', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.948753] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 
tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.951889] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.952479] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efda3e36-7e42-4925-a90e-91ebd8da4ff3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.973151] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.973151] env[69994]: value = "task-3241659" [ 772.973151] env[69994]: _type = "Task" [ 772.973151] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.982572] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241659, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.994972] env[69994]: DEBUG nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.027074] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.027291] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.027736] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.027736] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 
tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.027938] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.028124] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.028700] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.028758] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.029054] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.029311] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.029555] env[69994]: DEBUG nova.virt.hardware [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.031087] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754c01e2-c26e-47eb-8e5c-c7ac8316e5e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.041194] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b92c00d-3318-4f2e-822e-467c2ed0b803 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.183979] env[69994]: DEBUG nova.scheduler.client.report [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 
based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.225173] env[69994]: DEBUG nova.network.neutron [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Updated VIF entry in instance network info cache for port aa1c3cb9-5c3d-4700-af3b-94fbe3952be2. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.225625] env[69994]: DEBUG nova.network.neutron [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Updating instance_info_cache with network_info: [{"id": "aa1c3cb9-5c3d-4700-af3b-94fbe3952be2", "address": "fa:16:3e:b9:96:a2", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1c3cb9-5c", "ovs_interfaceid": "aa1c3cb9-5c3d-4700-af3b-94fbe3952be2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.266840] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448566} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.267105] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 4dbf53e0-caa1-41f4-8376-dfba8d8567cd/4dbf53e0-caa1-41f4-8376-dfba8d8567cd.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.267319] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.267566] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8ab34c9-0b22-4806-b651-82796b779338 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.274743] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 773.274743] env[69994]: value = "task-3241660" [ 773.274743] env[69994]: _type = "Task" [ 773.274743] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.282575] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241660, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.301658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.301918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.327780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "14b28a21-1b71-4d7e-bd6c-269f5d588300" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.328065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.484216] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241659, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.689803] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.690341] env[69994]: DEBUG nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 773.692900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.508s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.693107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.695168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.682s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.698780] env[69994]: INFO nova.compute.claims [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.725403] env[69994]: INFO nova.scheduler.client.report [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Deleted allocations for instance 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2 [ 773.729044] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd9812a5-f637-4e44-8741-ef6e09cf324f req-d9e6bd4e-63a0-49c1-8a10-87c59bf7cad6 service nova] Releasing lock "refresh_cache-214b3508-6fb9-455e-be6b-bd9f6902b7ae" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.784657] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083714} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.785166] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.786742] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acff508d-acca-4580-b86f-ee3a1f6ca334 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.790331] env[69994]: DEBUG nova.network.neutron [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Successfully updated port: de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.811916] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 4dbf53e0-caa1-41f4-8376-dfba8d8567cd/4dbf53e0-caa1-41f4-8376-dfba8d8567cd.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.812828] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac868d6d-562e-41ca-a881-d087d8139176 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.838187] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 773.838187] env[69994]: value = "task-3241661" [ 773.838187] env[69994]: _type = "Task" [ 773.838187] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.845845] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241661, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.984837] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241659, 'name': CreateVM_Task, 'duration_secs': 0.557213} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.985063] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.985817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.986896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.986896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 773.986896] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e673f07a-d3e9-403f-be2a-6167c4c6aee5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.991308] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 773.991308] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b4a54e-74ec-f70f-418e-f35edabe6552" [ 773.991308] env[69994]: _type = "Task" [ 773.991308] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.998740] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b4a54e-74ec-f70f-418e-f35edabe6552, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.155154] env[69994]: INFO nova.compute.manager [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Unrescuing [ 774.155455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.155612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquired lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.155781] env[69994]: DEBUG nova.network.neutron [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.205151] env[69994]: DEBUG nova.compute.utils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 774.209534] env[69994]: DEBUG nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 774.210567] env[69994]: DEBUG nova.network.neutron [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 774.233233] env[69994]: DEBUG oslo_concurrency.lockutils [None req-387d9f4f-ac53-476a-a1fc-2fed7cf95e3e tempest-ServersAdminNegativeTestJSON-1297532584 tempest-ServersAdminNegativeTestJSON-1297532584-project-member] Lock "2244e8ad-75f6-42bc-a97d-7f26eaba1aa2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.604s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.271955] env[69994]: DEBUG nova.policy [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '849a45b1c7954562891aa0af98bcd508', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7919e01669a04af68d70ddff8fea2cd3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 774.291590] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.291757] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.291916] env[69994]: DEBUG nova.network.neutron [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.349757] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241661, 'name': ReconfigVM_Task, 'duration_secs': 0.269305} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.350078] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 4dbf53e0-caa1-41f4-8376-dfba8d8567cd/4dbf53e0-caa1-41f4-8376-dfba8d8567cd.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.350744] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed924d5f-8703-4ea6-b55f-1f9a785276b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.357060] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 774.357060] env[69994]: value = "task-3241662" [ 774.357060] env[69994]: _type = "Task" [ 774.357060] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.365996] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241662, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.507030] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b4a54e-74ec-f70f-418e-f35edabe6552, 'name': SearchDatastore_Task, 'duration_secs': 0.016811} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.507030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.507030] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.507030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.507702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.508021] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.508427] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4edd520-16ac-4349-acb8-f030bc7ae6c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.516832] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.517036] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.517788] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa10b954-f1f8-49d9-a355-ec9c4629d83a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.523018] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 774.523018] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ddef75-022e-ef41-24ff-fdd2123d465b" [ 774.523018] env[69994]: _type = "Task" [ 774.523018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.531099] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ddef75-022e-ef41-24ff-fdd2123d465b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.573173] env[69994]: DEBUG nova.compute.manager [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Received event network-vif-plugged-de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.573173] env[69994]: DEBUG oslo_concurrency.lockutils [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] Acquiring lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.573173] env[69994]: DEBUG oslo_concurrency.lockutils [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.573173] env[69994]: DEBUG oslo_concurrency.lockutils [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.573377] env[69994]: DEBUG nova.compute.manager [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] No waiting events found dispatching network-vif-plugged-de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.573421] env[69994]: WARNING nova.compute.manager [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Received unexpected event 
network-vif-plugged-de62ca8d-e627-414e-a2b0-e988e91c52d0 for instance with vm_state building and task_state spawning. [ 774.573570] env[69994]: DEBUG nova.compute.manager [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Received event network-changed-de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.573720] env[69994]: DEBUG nova.compute.manager [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Refreshing instance network info cache due to event network-changed-de62ca8d-e627-414e-a2b0-e988e91c52d0. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 774.573887] env[69994]: DEBUG oslo_concurrency.lockutils [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] Acquiring lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.668927] env[69994]: DEBUG nova.network.neutron [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Successfully created port: 9cc26e46-d3c4-47b8-bc39-207d9e40b10d {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 774.709957] env[69994]: DEBUG nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 774.863435] env[69994]: DEBUG nova.network.neutron [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.870665] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241662, 'name': Rename_Task, 'duration_secs': 0.149448} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.870665] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 774.870895] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84c96aa1-51fc-462f-ac9f-363389190cef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.878628] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 774.878628] env[69994]: value = "task-3241663" [ 774.878628] env[69994]: _type = "Task" [ 774.878628] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.894927] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241663, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.928986] env[69994]: DEBUG nova.network.neutron [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Updating instance_info_cache with network_info: [{"id": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "address": "fa:16:3e:6e:fa:b2", "network": {"id": "0edf701f-8d15-4cdd-a234-ebea64cfa425", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-580718484-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8fb027b5b61c43cdbac3c89eb1e0f2a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d9621bb-8e", "ovs_interfaceid": "5d9621bb-8ee0-4885-a42e-d68e4c759211", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.040693] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ddef75-022e-ef41-24ff-fdd2123d465b, 'name': SearchDatastore_Task, 'duration_secs': 0.008494} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.044074] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b67c881-162b-4bcb-9a86-838a97022297 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.050172] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 775.050172] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52719f51-40f4-1271-0b65-0096c1d6c0aa" [ 775.050172] env[69994]: _type = "Task" [ 775.050172] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.058559] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52719f51-40f4-1271-0b65-0096c1d6c0aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.195023] env[69994]: DEBUG nova.network.neutron [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updating instance_info_cache with network_info: [{"id": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "address": "fa:16:3e:f3:e7:97", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62ca8d-e6", "ovs_interfaceid": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.393987] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241663, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.411379] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eaa6ab1-9bba-4268-b92b-970763848f6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.418694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a09642-bb78-46a7-852f-685c9ce33315 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.451537] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Releasing lock "refresh_cache-558ee84a-731b-4cb1-967d-cf84c8d39718" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.452274] env[69994]: DEBUG nova.objects.instance [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lazy-loading 'flavor' on Instance uuid 558ee84a-731b-4cb1-967d-cf84c8d39718 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 775.458021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb29621-3520-4231-8d96-2956c9c71477 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.462805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb23606-f2b2-47f7-ade2-d29af10f0e81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.477322] env[69994]: DEBUG nova.compute.provider_tree [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.559911] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52719f51-40f4-1271-0b65-0096c1d6c0aa, 'name': SearchDatastore_Task, 'duration_secs': 0.009882} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.560269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.560509] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 214b3508-6fb9-455e-be6b-bd9f6902b7ae/214b3508-6fb9-455e-be6b-bd9f6902b7ae.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.560785] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba1f14f2-1b15-4a09-8df4-9f518463814e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.566539] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 775.566539] env[69994]: value = "task-3241664" [ 775.566539] env[69994]: _type = "Task" [ 775.566539] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.573900] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241664, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.699884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Releasing lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.700240] env[69994]: DEBUG nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance network_info: |[{"id": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "address": "fa:16:3e:f3:e7:97", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62ca8d-e6", "ovs_interfaceid": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 775.700564] env[69994]: DEBUG oslo_concurrency.lockutils [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] Acquired lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.700827] env[69994]: DEBUG nova.network.neutron [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Refreshing network info cache for port de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 775.705024] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:e7:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de62ca8d-e627-414e-a2b0-e988e91c52d0', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 775.709559] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 
tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.710026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 775.710282] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1e8db00-7223-443e-8788-3624a5722c97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.726717] env[69994]: DEBUG nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 775.734168] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.734168] env[69994]: value = "task-3241665" [ 775.734168] env[69994]: _type = "Task" [ 775.734168] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.743938] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241665, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.753598] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 775.753831] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 775.753985] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 775.754178] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 
tempest-ListServerFiltersTestJSON-30304636-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 775.754324] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 775.754468] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 775.754687] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 775.754856] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 775.755047] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 775.755219] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 775.755393] env[69994]: DEBUG nova.virt.hardware [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 775.756289] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3d2eec-de1b-46fd-a66f-6678bd21e524 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.764410] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4580d60-a122-4da7-a20e-3fbf5f6d1c88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.890592] env[69994]: DEBUG oslo_vmware.api [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241663, 'name': PowerOnVM_Task, 'duration_secs': 0.534891} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.890909] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.891143] env[69994]: INFO nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Took 8.28 seconds to spawn the instance on the hypervisor. [ 775.891347] env[69994]: DEBUG nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 775.892151] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cc9b19-41d4-4a0c-b381-ea67f838b594 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.958907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972f7136-6e81-42be-b01a-cf5d21a3d49c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.985085] env[69994]: DEBUG nova.scheduler.client.report [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.988600] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.992051] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ed5d877-3609-4605-9d82-381192e4a1c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.998079] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 775.998079] env[69994]: value = "task-3241666" [ 775.998079] env[69994]: _type = "Task" [ 775.998079] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.006752] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.078282] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241664, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.244210] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241665, 'name': CreateVM_Task, 'duration_secs': 0.362489} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.244461] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.245179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.245351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.245693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 776.245981] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc17f64-df3c-4ed2-8883-cf82958d6841 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.251463] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 776.251463] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5280e2af-b1d6-b490-3842-f7ade0290fa9" [ 776.251463] env[69994]: _type = "Task" [ 776.251463] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.260627] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5280e2af-b1d6-b490-3842-f7ade0290fa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.407899] env[69994]: INFO nova.compute.manager [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Took 49.56 seconds to build instance. [ 776.490113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.490695] env[69994]: DEBUG nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 776.494487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.873s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.495192] env[69994]: INFO nova.compute.claims [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 776.518763] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241666, 'name': PowerOffVM_Task, 'duration_secs': 0.221289} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.519026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 776.539178] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Reconfiguring VM instance instance-00000027 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 776.539178] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fae45dc-5759-4c0a-8f7b-ec7defb84a7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.576937] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 776.576937] env[69994]: value = "task-3241667" [ 776.576937] env[69994]: _type = "Task" [ 776.576937] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.584181] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241664, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.770696} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.584734] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 214b3508-6fb9-455e-be6b-bd9f6902b7ae/214b3508-6fb9-455e-be6b-bd9f6902b7ae.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.584969] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.585232] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3c13ef1-fbe5-45dd-bbbd-699ffdda3385 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.590258] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241667, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.594131] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 776.594131] env[69994]: value = "task-3241668" [ 776.594131] env[69994]: _type = "Task" [ 776.594131] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.602793] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241668, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.761894] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5280e2af-b1d6-b490-3842-f7ade0290fa9, 'name': SearchDatastore_Task, 'duration_secs': 0.055804} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.762393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.762491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.762735] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.762881] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.763069] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
776.763360] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de7eae5b-1e21-4fcf-a125-1659c5cfc2da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.780168] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.780479] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 776.781491] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-715854cc-4812-4ea2-90ed-4197c6ea8aa9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.789246] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 776.789246] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520ea2fe-851a-7ef6-3c24-c426e582f683" [ 776.789246] env[69994]: _type = "Task" [ 776.789246] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.798814] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520ea2fe-851a-7ef6-3c24-c426e582f683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.910052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f756d8a-dba7-4f4f-a9d5-0de863673cb7 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.187s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.004734] env[69994]: DEBUG nova.compute.utils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 777.013444] env[69994]: DEBUG nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 777.013444] env[69994]: DEBUG nova.network.neutron [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 777.052902] env[69994]: DEBUG nova.policy [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0330b3b0572a4ce9ad086ba11e029b2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38da89daa38f419c88a0f6ff96147715', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 777.092018] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241667, 'name': ReconfigVM_Task, 'duration_secs': 0.228} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.092340] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Reconfigured VM instance instance-00000027 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 777.092526] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 777.092782] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3024051-595a-4851-a7b1-13b45b25828d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.104357] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066625} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.105487] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.105817] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 777.105817] env[69994]: value = "task-3241669" [ 777.105817] env[69994]: _type = "Task" [ 777.105817] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.106527] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b1cfc3-3ccd-4943-89ca-f88fab1a361d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.121324] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241669, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.142342] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 214b3508-6fb9-455e-be6b-bd9f6902b7ae/214b3508-6fb9-455e-be6b-bd9f6902b7ae.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.142679] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4045672-3a6c-4c9e-a84d-5e3eb8bb938c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.158462] env[69994]: DEBUG nova.network.neutron [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updated VIF entry in instance network info cache for port de62ca8d-e627-414e-a2b0-e988e91c52d0. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.158802] env[69994]: DEBUG nova.network.neutron [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updating instance_info_cache with network_info: [{"id": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "address": "fa:16:3e:f3:e7:97", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62ca8d-e6", "ovs_interfaceid": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.167155] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 777.167155] env[69994]: value = "task-3241670" [ 777.167155] env[69994]: _type = "Task" [ 777.167155] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.177363] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241670, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.300759] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520ea2fe-851a-7ef6-3c24-c426e582f683, 'name': SearchDatastore_Task, 'duration_secs': 0.045118} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.301798] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec708b06-d0d4-4d7d-ae6f-9fd5448a4568 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.307140] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 777.307140] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ec5a25-0f6b-bb59-542c-371a8de64888" [ 777.307140] env[69994]: _type = "Task" [ 777.307140] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.317071] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ec5a25-0f6b-bb59-542c-371a8de64888, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.327704] env[69994]: DEBUG nova.network.neutron [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Successfully created port: 8c75c77e-7172-418b-80e0-dc189770afca {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 777.405839] env[69994]: DEBUG nova.network.neutron [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Successfully updated port: 9cc26e46-d3c4-47b8-bc39-207d9e40b10d {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.413034] env[69994]: DEBUG nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.497710] env[69994]: DEBUG nova.compute.manager [req-a14e3f26-5d90-4afb-8601-0200432f9d9d req-c18e660f-c6d0-45a1-9607-8b21dc11a4e1 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Received event network-vif-plugged-9cc26e46-d3c4-47b8-bc39-207d9e40b10d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 777.497968] env[69994]: DEBUG oslo_concurrency.lockutils [req-a14e3f26-5d90-4afb-8601-0200432f9d9d req-c18e660f-c6d0-45a1-9607-8b21dc11a4e1 service nova] Acquiring lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.498245] env[69994]: DEBUG oslo_concurrency.lockutils [req-a14e3f26-5d90-4afb-8601-0200432f9d9d req-c18e660f-c6d0-45a1-9607-8b21dc11a4e1 service nova] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.500621] env[69994]: DEBUG oslo_concurrency.lockutils [req-a14e3f26-5d90-4afb-8601-0200432f9d9d req-c18e660f-c6d0-45a1-9607-8b21dc11a4e1 service nova] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.500621] env[69994]: DEBUG nova.compute.manager [req-a14e3f26-5d90-4afb-8601-0200432f9d9d req-c18e660f-c6d0-45a1-9607-8b21dc11a4e1 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] No waiting events found dispatching network-vif-plugged-9cc26e46-d3c4-47b8-bc39-207d9e40b10d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 777.500621] env[69994]: WARNING nova.compute.manager [req-a14e3f26-5d90-4afb-8601-0200432f9d9d req-c18e660f-c6d0-45a1-9607-8b21dc11a4e1 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Received unexpected event network-vif-plugged-9cc26e46-d3c4-47b8-bc39-207d9e40b10d for instance with vm_state building and task_state spawning. [ 777.513058] env[69994]: DEBUG nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 777.620108] env[69994]: DEBUG oslo_vmware.api [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241669, 'name': PowerOnVM_Task, 'duration_secs': 0.377534} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.620415] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.620700] env[69994]: DEBUG nova.compute.manager [None req-bf3c6f27-9f1f-4163-8589-b5091e333c8f tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.621411] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384b5b8e-626a-4f2e-b856-1b747d53ff2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.664840] env[69994]: DEBUG oslo_concurrency.lockutils [req-48b1f374-be9f-45e8-81bf-b2656512aae5 req-6c245f47-2e0f-4e60-9fd4-7d925dd3efdd service nova] Releasing lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.679015] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241670, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.827112] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ec5a25-0f6b-bb59-542c-371a8de64888, 'name': SearchDatastore_Task, 'duration_secs': 0.013469} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.827112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.827112] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0b284e71-7af2-4782-b950-4f7eac5221a4/0b284e71-7af2-4782-b950-4f7eac5221a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 777.827112] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c7aebf6-192b-4683-b0ab-a59ab3e7ba14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.833981] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 777.833981] env[69994]: value = "task-3241671" [ 777.833981] env[69994]: _type = "Task" [ 777.833981] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.847312] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241671, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.909574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "refresh_cache-d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.909574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "refresh_cache-d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.909574] env[69994]: DEBUG nova.network.neutron [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.942632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.180845] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241670, 'name': ReconfigVM_Task, 'duration_secs': 0.569707} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.186450] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 214b3508-6fb9-455e-be6b-bd9f6902b7ae/214b3508-6fb9-455e-be6b-bd9f6902b7ae.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.187429] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-512f5e59-6ad5-445c-9b75-c2429dace91f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.195042] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 778.195042] env[69994]: value = "task-3241672" [ 778.195042] env[69994]: _type = "Task" [ 778.195042] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.206130] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241672, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.277094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13882323-0d71-4551-b512-09952a327a55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.286208] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf07481-5fef-4d78-a9e0-d114f53597dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.324317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0a55d3-fe98-4f55-bc40-d8e65ffa1b3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.344123] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa8cd96-67cb-419d-b305-634020c3cab2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.364263] env[69994]: DEBUG nova.compute.provider_tree [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.370137] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241671, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.454146] env[69994]: DEBUG nova.network.neutron [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.532018] env[69994]: DEBUG nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 778.563680] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 778.563927] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.564118] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 778.564318] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.564470] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 778.564617] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 778.564819] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 778.565055] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 
tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 778.565172] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 778.565356] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 778.565539] env[69994]: DEBUG nova.virt.hardware [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 778.566444] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8086b1e1-9dd9-4293-aba6-6d3cb4349e0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.574409] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce88a3f6-c114-4fba-9756-314c4eeae355 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.677150] env[69994]: DEBUG nova.network.neutron [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Updating instance_info_cache with network_info: [{"id": "9cc26e46-d3c4-47b8-bc39-207d9e40b10d", "address": "fa:16:3e:f9:e8:71", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc26e46-d3", "ovs_interfaceid": "9cc26e46-d3c4-47b8-bc39-207d9e40b10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.711339] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 
tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241672, 'name': Rename_Task, 'duration_secs': 0.294653} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.714104] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.714104] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8443db1-b01e-4593-98b9-176e6245474a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.726589] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 778.726589] env[69994]: value = "task-3241673" [ 778.726589] env[69994]: _type = "Task" [ 778.726589] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.737111] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.846510] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241671, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661493} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.846824] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0b284e71-7af2-4782-b950-4f7eac5221a4/0b284e71-7af2-4782-b950-4f7eac5221a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 778.847073] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.849199] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58cb7cc2-e8dd-44bd-9fc1-d49406cc894f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.856640] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 778.856640] env[69994]: value = "task-3241674" [ 778.856640] env[69994]: _type = "Task" [ 778.856640] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.865915] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241674, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.871937] env[69994]: DEBUG nova.scheduler.client.report [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 778.913611] env[69994]: DEBUG nova.network.neutron [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Successfully updated port: 8c75c77e-7172-418b-80e0-dc189770afca {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.923704] env[69994]: DEBUG nova.compute.manager [req-725ccd51-11ec-4abb-bd12-9f7e8d213a69 req-64497197-59d8-4185-8023-1d4695dd15de service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Received event network-vif-plugged-8c75c77e-7172-418b-80e0-dc189770afca {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 778.923953] env[69994]: DEBUG oslo_concurrency.lockutils [req-725ccd51-11ec-4abb-bd12-9f7e8d213a69 req-64497197-59d8-4185-8023-1d4695dd15de service nova] Acquiring lock "86e514bb-8b47-4605-bd85-55c6c9874320-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.924480] env[69994]: DEBUG oslo_concurrency.lockutils [req-725ccd51-11ec-4abb-bd12-9f7e8d213a69 req-64497197-59d8-4185-8023-1d4695dd15de service nova] Lock "86e514bb-8b47-4605-bd85-55c6c9874320-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.924480] env[69994]: DEBUG oslo_concurrency.lockutils [req-725ccd51-11ec-4abb-bd12-9f7e8d213a69 req-64497197-59d8-4185-8023-1d4695dd15de service nova] Lock "86e514bb-8b47-4605-bd85-55c6c9874320-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.924664] env[69994]: DEBUG nova.compute.manager [req-725ccd51-11ec-4abb-bd12-9f7e8d213a69 req-64497197-59d8-4185-8023-1d4695dd15de service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] No waiting events found dispatching network-vif-plugged-8c75c77e-7172-418b-80e0-dc189770afca {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 778.924664] env[69994]: WARNING nova.compute.manager [req-725ccd51-11ec-4abb-bd12-9f7e8d213a69 req-64497197-59d8-4185-8023-1d4695dd15de service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Received unexpected event network-vif-plugged-8c75c77e-7172-418b-80e0-dc189770afca for instance with vm_state building and 
task_state spawning. [ 779.182043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "refresh_cache-d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.182043] env[69994]: DEBUG nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Instance network_info: |[{"id": "9cc26e46-d3c4-47b8-bc39-207d9e40b10d", "address": "fa:16:3e:f9:e8:71", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc26e46-d3", "ovs_interfaceid": "9cc26e46-d3c4-47b8-bc39-207d9e40b10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 779.182237] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:e8:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cc26e46-d3c4-47b8-bc39-207d9e40b10d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.192625] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 779.193299] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 779.193887] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-787d489c-6e41-472a-9bca-6086fb4c46cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.220883] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.220883] env[69994]: value = "task-3241675" [ 779.220883] env[69994]: _type = "Task" [ 779.220883] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.230261] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241675, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.238057] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241673, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.368428] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241674, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.260121} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.368779] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.369665] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5e2cd6-13f7-43cb-a68c-aa50a5269152 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.387778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.895s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.388372] env[69994]: DEBUG nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 779.400244] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 0b284e71-7af2-4782-b950-4f7eac5221a4/0b284e71-7af2-4782-b950-4f7eac5221a4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.401034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.982s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.402600] env[69994]: INFO nova.compute.claims [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 779.405215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2000d0b-f923-4fa6-885f-b22ab314938f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.421352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "refresh_cache-86e514bb-8b47-4605-bd85-55c6c9874320" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.421413] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquired lock "refresh_cache-86e514bb-8b47-4605-bd85-55c6c9874320" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.421606] env[69994]: DEBUG nova.network.neutron [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.428882] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 779.428882] env[69994]: value = "task-3241676" [ 779.428882] env[69994]: _type = "Task" [ 779.428882] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.441413] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241676, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.597978] env[69994]: DEBUG nova.compute.manager [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Received event network-changed-9cc26e46-d3c4-47b8-bc39-207d9e40b10d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.597978] env[69994]: DEBUG nova.compute.manager [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Refreshing instance network info cache due to event network-changed-9cc26e46-d3c4-47b8-bc39-207d9e40b10d. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 779.597978] env[69994]: DEBUG oslo_concurrency.lockutils [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] Acquiring lock "refresh_cache-d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.598329] env[69994]: DEBUG oslo_concurrency.lockutils [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] Acquired lock "refresh_cache-d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.598329] env[69994]: DEBUG nova.network.neutron [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Refreshing network info cache for port 9cc26e46-d3c4-47b8-bc39-207d9e40b10d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.651911] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "558ee84a-731b-4cb1-967d-cf84c8d39718" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.652469] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.652727] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "558ee84a-731b-4cb1-967d-cf84c8d39718-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.652933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.653114] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.660121] env[69994]: INFO nova.compute.manager [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Terminating instance [ 779.734843] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241675, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.751409] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241673, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.902177] env[69994]: DEBUG nova.compute.utils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 779.903653] env[69994]: DEBUG nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 779.903851] env[69994]: DEBUG nova.network.neutron [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 779.942586] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241676, 'name': ReconfigVM_Task, 'duration_secs': 0.325349} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.944300] env[69994]: DEBUG nova.policy [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '471a2a9e2b4a4d3da7935b67e87b0fe8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30a9ea2f804f49ec8c5c6861b507454e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 779.946160] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 0b284e71-7af2-4782-b950-4f7eac5221a4/0b284e71-7af2-4782-b950-4f7eac5221a4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.946990] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ac1446f-3f0a-4e53-b8a1-938abd688697 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.957547] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 779.957547] env[69994]: value = "task-3241677" [ 779.957547] env[69994]: _type = "Task" [ 779.957547] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.958401] env[69994]: DEBUG nova.network.neutron [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.969555] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241677, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.169029] env[69994]: DEBUG nova.compute.manager [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 780.169029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.169029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c60df72-4837-4333-89a1-85f000eb11a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.178039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 780.178572] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84a70bb5-1f71-4125-8c01-45701d17a6ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.186594] env[69994]: DEBUG oslo_vmware.api [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 780.186594] env[69994]: value = "task-3241678" [ 780.186594] env[69994]: _type = "Task" [ 780.186594] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.197478] env[69994]: DEBUG oslo_vmware.api [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241678, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.224147] env[69994]: DEBUG nova.network.neutron [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Updating instance_info_cache with network_info: [{"id": "8c75c77e-7172-418b-80e0-dc189770afca", "address": "fa:16:3e:a3:b7:46", "network": {"id": "34c8132f-d505-4dbd-818c-539e1f6a8beb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2126102519-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38da89daa38f419c88a0f6ff96147715", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c75c77e-71", "ovs_interfaceid": "8c75c77e-7172-418b-80e0-dc189770afca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.235215] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241675, 'name': CreateVM_Task, 'duration_secs': 0.53541} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.241124] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.241124] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.241124] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.241124] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 780.242627] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb6400a6-5e70-4f14-910b-3fcf679c62b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.247514] env[69994]: DEBUG oslo_vmware.api [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241673, 'name': PowerOnVM_Task, 'duration_secs': 1.02573} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.250481] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.250935] env[69994]: INFO nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Took 9.99 seconds to spawn the instance on the hypervisor. 
[ 780.251189] env[69994]: DEBUG nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.258221] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19ad7a3-ab2e-4309-9628-3821dd0e2832 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.260551] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 780.260551] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f7353e-1d4f-5bce-9d69-b76c27c0c5f8" [ 780.260551] env[69994]: _type = "Task" [ 780.260551] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.284224] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f7353e-1d4f-5bce-9d69-b76c27c0c5f8, 'name': SearchDatastore_Task, 'duration_secs': 0.017591} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.286121] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.286121] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.286296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.287049] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.287307] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Creating directory 
with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.288045] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd2adb35-d1ad-4fe6-a16c-f0d00f47a90a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.299853] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.300285] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 780.302042] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b885b9f2-f25b-463b-9696-023599ae090c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.312021] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 780.312021] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5221ad73-c3fb-bb2c-ebc6-383b761b66d1" [ 780.312021] env[69994]: _type = "Task" [ 780.312021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.322311] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5221ad73-c3fb-bb2c-ebc6-383b761b66d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.406856] env[69994]: DEBUG nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 780.475790] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241677, 'name': Rename_Task, 'duration_secs': 0.143957} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.476700] env[69994]: DEBUG nova.network.neutron [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Successfully created port: de3e77dc-7712-4e45-b1d3-fd50595cb0f2 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.478628] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.481546] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5adc0642-18a1-4c97-bfff-61ee152034f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.488552] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 780.488552] env[69994]: value = "task-3241679" [ 780.488552] env[69994]: _type = "Task" [ 780.488552] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.499318] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241679, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.611054] env[69994]: DEBUG nova.network.neutron [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Updated VIF entry in instance network info cache for port 9cc26e46-d3c4-47b8-bc39-207d9e40b10d. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.611054] env[69994]: DEBUG nova.network.neutron [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Updating instance_info_cache with network_info: [{"id": "9cc26e46-d3c4-47b8-bc39-207d9e40b10d", "address": "fa:16:3e:f9:e8:71", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cc26e46-d3", "ovs_interfaceid": "9cc26e46-d3c4-47b8-bc39-207d9e40b10d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.698941] env[69994]: DEBUG oslo_vmware.api [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241678, 'name': PowerOffVM_Task, 'duration_secs': 0.311575} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.699275] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 780.699439] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 780.699699] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8321812-dad0-4c8c-bb9a-0357688b4c0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.729776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Releasing lock "refresh_cache-86e514bb-8b47-4605-bd85-55c6c9874320" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.730145] env[69994]: DEBUG nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Instance network_info: |[{"id": "8c75c77e-7172-418b-80e0-dc189770afca", "address": "fa:16:3e:a3:b7:46", "network": {"id": "34c8132f-d505-4dbd-818c-539e1f6a8beb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2126102519-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38da89daa38f419c88a0f6ff96147715", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c75c77e-71", "ovs_interfaceid": "8c75c77e-7172-418b-80e0-dc189770afca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 780.730808] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:b7:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '8c75c77e-7172-418b-80e0-dc189770afca', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 780.738551] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Creating folder: Project (38da89daa38f419c88a0f6ff96147715). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 780.739085] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7477f9e9-e312-4c25-9615-29fb09e0f1c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.750791] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Created folder: Project (38da89daa38f419c88a0f6ff96147715) in parent group-v647729. [ 780.750791] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Creating folder: Instances. Parent ref: group-v647859. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 780.750791] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67fc78f5-fdcc-4bbf-8fbb-5691c5b0c748 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.762065] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Created folder: Instances in parent group-v647859. [ 780.762065] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.762065] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 780.764702] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd15f4f1-7d02-426b-8adc-164b0adfa80a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.789129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.789129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.789129] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Deleting the datastore file [datastore2] 558ee84a-731b-4cb1-967d-cf84c8d39718 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.789606] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b74b7671-5ce2-4f91-9b03-9d07564df951 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.795302] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 780.795302] env[69994]: value = "task-3241685" [ 780.795302] env[69994]: _type = "Task" [ 780.795302] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.795877] env[69994]: INFO nova.compute.manager [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Took 51.54 seconds to build instance. [ 780.805525] env[69994]: DEBUG oslo_vmware.api [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 780.805525] env[69994]: value = "task-3241687" [ 780.805525] env[69994]: _type = "Task" [ 780.805525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.813380] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241685, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.823367] env[69994]: DEBUG oslo_vmware.api [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241687, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.830245] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5221ad73-c3fb-bb2c-ebc6-383b761b66d1, 'name': SearchDatastore_Task, 'duration_secs': 0.012146} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.832132] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2fae6b0-80a7-4760-b924-439487bd8473 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.837399] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 780.837399] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522c812d-1223-e44d-e157-bf5ae18fe155" [ 780.837399] env[69994]: _type = "Task" [ 780.837399] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.851331] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522c812d-1223-e44d-e157-bf5ae18fe155, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.001220] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241679, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.104696] env[69994]: DEBUG nova.compute.manager [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Received event network-changed-8c75c77e-7172-418b-80e0-dc189770afca {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 781.104914] env[69994]: DEBUG nova.compute.manager [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Refreshing instance network info cache due to event network-changed-8c75c77e-7172-418b-80e0-dc189770afca. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 781.105310] env[69994]: DEBUG oslo_concurrency.lockutils [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] Acquiring lock "refresh_cache-86e514bb-8b47-4605-bd85-55c6c9874320" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.105393] env[69994]: DEBUG oslo_concurrency.lockutils [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] Acquired lock "refresh_cache-86e514bb-8b47-4605-bd85-55c6c9874320" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.105546] env[69994]: DEBUG nova.network.neutron [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Refreshing network info cache for port 8c75c77e-7172-418b-80e0-dc189770afca {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.116392] env[69994]: DEBUG oslo_concurrency.lockutils [req-96465474-ef1f-41b4-a8fd-35296b3b2e74 req-4e3d560f-1996-4fae-93ac-7ea938c4a128 service nova] Releasing lock "refresh_cache-d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.157931] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c91a61-93b4-4690-90a2-ff516943b2e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.166059] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fe6820-fb32-48d4-a562-e0b3c3de6446 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.198335] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa539d0-042c-4cbb-8e1b-aebda08974c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.205442] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af399fb-56a7-482a-bd08-21a6271088dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.218737] env[69994]: DEBUG nova.compute.provider_tree [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.306051] env[69994]: DEBUG oslo_concurrency.lockutils [None req-620a7797-c918-4d22-bd53-c072c4fcd7a5 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.326s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.306298] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241685, 'name': CreateVM_Task, 'duration_secs': 0.451452} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.308313] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 781.313324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.313428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.313755] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 781.314026] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82451de5-b6d3-487d-9892-774561c212e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.321155] env[69994]: DEBUG oslo_vmware.api [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241687, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237848} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.322485] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.322692] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 781.322887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 781.323090] env[69994]: INFO nova.compute.manager [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Took 1.16 seconds to destroy the instance on the hypervisor. [ 781.323364] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.323663] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 781.323663] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b01037-4263-4bd3-166b-04a6b88f2e2e" [ 781.323663] env[69994]: _type = "Task" [ 781.323663] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.324990] env[69994]: DEBUG nova.compute.manager [-] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 781.325154] env[69994]: DEBUG nova.network.neutron [-] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.337301] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b01037-4263-4bd3-166b-04a6b88f2e2e, 'name': SearchDatastore_Task, 'duration_secs': 0.011121} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.337608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.337800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.338070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.338272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.338497] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.338814] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f7fcef9-26ff-4c1b-8e04-782906f58678 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.349977] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522c812d-1223-e44d-e157-bf5ae18fe155, 'name': SearchDatastore_Task, 'duration_secs': 0.012253} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.351126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.351411] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] d4f87534-813e-4ff6-8b1f-ee23cb0b8e80/d4f87534-813e-4ff6-8b1f-ee23cb0b8e80.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 781.351713] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.351922] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.352716] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6e3af96-08ab-4908-bd62-dda88bc2b148 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.354677] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c5c29fb-94fa-4f75-90a1-5b30efffa63a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.361507] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 781.361507] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522c9ea4-6eb6-f243-1a76-4a124093c84f" [ 781.361507] env[69994]: _type = "Task" [ 781.361507] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.365418] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 781.365418] env[69994]: value = "task-3241688" [ 781.365418] env[69994]: _type = "Task" [ 781.365418] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.372432] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522c9ea4-6eb6-f243-1a76-4a124093c84f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.377325] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241688, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.424137] env[69994]: DEBUG nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 781.451456] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 781.451732] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 781.451888] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 781.452078] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 781.452231] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 
tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 781.452377] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 781.452580] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 781.452778] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 781.452950] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 781.453124] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 781.453299] env[69994]: DEBUG nova.virt.hardware [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 781.454613] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0212be57-4e9f-40c7-8acc-9e1085f281a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.466750] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ff3375-04f7-4e4e-b154-8088fa16c2c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.501456] env[69994]: DEBUG oslo_vmware.api [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241679, 'name': PowerOnVM_Task, 'duration_secs': 0.531424} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.501797] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.502105] env[69994]: INFO nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Took 8.51 seconds to spawn the instance on the hypervisor. [ 781.502328] env[69994]: DEBUG nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.503140] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184163a1-759e-4968-bc5f-b65903b25605 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.656158] env[69994]: DEBUG nova.compute.manager [req-5f70d510-eeb0-421f-b885-556bad151f56 req-2ae8d8e2-333e-438b-a26c-f17587e8f1d8 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Received event network-vif-deleted-5d9621bb-8ee0-4885-a42e-d68e4c759211 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 781.656158] env[69994]: INFO nova.compute.manager [req-5f70d510-eeb0-421f-b885-556bad151f56 req-2ae8d8e2-333e-438b-a26c-f17587e8f1d8 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Neutron deleted interface 5d9621bb-8ee0-4885-a42e-d68e4c759211; detaching it from the instance and deleting it from the info cache [ 781.656158] env[69994]: DEBUG nova.network.neutron [req-5f70d510-eeb0-421f-b885-556bad151f56 req-2ae8d8e2-333e-438b-a26c-f17587e8f1d8 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.723784] env[69994]: DEBUG nova.scheduler.client.report [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 781.810132] env[69994]: DEBUG nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 781.875662] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241688, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.879459] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522c9ea4-6eb6-f243-1a76-4a124093c84f, 'name': SearchDatastore_Task, 'duration_secs': 0.010194} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.880284] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cf1a88d-819b-4fc9-9919-8e52b9f481fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.885584] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 781.885584] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528a6506-0dbc-9b40-c124-a5579c7bc175" [ 781.885584] env[69994]: _type = "Task" [ 781.885584] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.899245] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528a6506-0dbc-9b40-c124-a5579c7bc175, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.940414] env[69994]: DEBUG nova.network.neutron [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Updated VIF entry in instance network info cache for port 8c75c77e-7172-418b-80e0-dc189770afca. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 781.940414] env[69994]: DEBUG nova.network.neutron [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Updating instance_info_cache with network_info: [{"id": "8c75c77e-7172-418b-80e0-dc189770afca", "address": "fa:16:3e:a3:b7:46", "network": {"id": "34c8132f-d505-4dbd-818c-539e1f6a8beb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2126102519-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38da89daa38f419c88a0f6ff96147715", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c75c77e-71", "ovs_interfaceid": "8c75c77e-7172-418b-80e0-dc189770afca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.023553] env[69994]: INFO nova.compute.manager [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Took 50.32 seconds to build instance. [ 782.044095] env[69994]: DEBUG nova.network.neutron [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Successfully updated port: de3e77dc-7712-4e45-b1d3-fd50595cb0f2 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 782.138354] env[69994]: DEBUG nova.network.neutron [-] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.158096] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-820f7470-d64a-49e8-b44a-91ba5dd803c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.168958] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b3e2a6-ff66-40c6-a70d-2c4fb76c9ccb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.208528] env[69994]: DEBUG nova.compute.manager [req-5f70d510-eeb0-421f-b885-556bad151f56 req-2ae8d8e2-333e-438b-a26c-f17587e8f1d8 service nova] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Detach interface failed, port_id=5d9621bb-8ee0-4885-a42e-d68e4c759211, reason: Instance 558ee84a-731b-4cb1-967d-cf84c8d39718 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 782.232190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.831s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.232911] env[69994]: DEBUG nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 782.239132] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.580s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.239403] env[69994]: DEBUG nova.objects.instance [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lazy-loading 'resources' on Instance uuid 7e7953f7-ed5d-4515-9181-93d343ad772d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.335402] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.375996] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.907027} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.376271] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] d4f87534-813e-4ff6-8b1f-ee23cb0b8e80/d4f87534-813e-4ff6-8b1f-ee23cb0b8e80.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.376488] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.376749] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-354bc5c3-2b2a-4e59-9cca-05b4f24955ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.382667] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 782.382667] env[69994]: value = "task-3241689" [ 782.382667] env[69994]: _type = "Task" [ 782.382667] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.393845] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241689, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.397909] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528a6506-0dbc-9b40-c124-a5579c7bc175, 'name': SearchDatastore_Task, 'duration_secs': 0.016505} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.398169] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.398455] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 86e514bb-8b47-4605-bd85-55c6c9874320/86e514bb-8b47-4605-bd85-55c6c9874320.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.398701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1efa5481-de4b-45de-8659-a6164f541d14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.405412] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 782.405412] env[69994]: value = "task-3241690" [ 782.405412] env[69994]: _type = "Task" [ 782.405412] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.414544] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241690, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.442529] env[69994]: DEBUG oslo_concurrency.lockutils [req-dd6f917f-8847-4090-8c1f-ae7a918fe86d req-6b391a54-fbb9-43bb-9555-99a34057db18 service nova] Releasing lock "refresh_cache-86e514bb-8b47-4605-bd85-55c6c9874320" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.525534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6911ad78-929f-44d0-b728-39b5f6a5dce8 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.645s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.546689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.546875] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.547072] env[69994]: DEBUG nova.network.neutron [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.641489] env[69994]: INFO nova.compute.manager [-] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Took 1.32 seconds to deallocate network for instance. [ 782.745019] env[69994]: DEBUG nova.compute.utils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 782.745019] env[69994]: DEBUG nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 782.745019] env[69994]: DEBUG nova.network.neutron [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 782.822657] env[69994]: DEBUG nova.policy [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '471a2a9e2b4a4d3da7935b67e87b0fe8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30a9ea2f804f49ec8c5c6861b507454e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 782.898177] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115148} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.898177] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 782.898698] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b8dcc8-b07d-4f79-a323-f3995ffdd2ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.925038] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] d4f87534-813e-4ff6-8b1f-ee23cb0b8e80/d4f87534-813e-4ff6-8b1f-ee23cb0b8e80.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 782.932814] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1376558-ca77-458d-a4e3-9980432095e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.961910] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241690, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.964774] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 782.964774] env[69994]: value = "task-3241691" [ 782.964774] env[69994]: _type = "Task" [ 782.964774] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.975027] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241691, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.028273] env[69994]: DEBUG nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.118678] env[69994]: DEBUG nova.network.neutron [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.147984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.234188] env[69994]: DEBUG nova.network.neutron [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Successfully created port: 2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.251105] env[69994]: DEBUG nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 783.394249] env[69994]: DEBUG nova.network.neutron [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Updating instance_info_cache with network_info: [{"id": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "address": "fa:16:3e:1e:ef:15", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde3e77dc-77", "ovs_interfaceid": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.432184] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.835136} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.432352] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 86e514bb-8b47-4605-bd85-55c6c9874320/86e514bb-8b47-4605-bd85-55c6c9874320.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 783.432571] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 783.432996] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e47f069-51b2-4f06-85e4-4885af552826 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.442462] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 783.442462] env[69994]: value = "task-3241692" [ 783.442462] env[69994]: _type = "Task" [ 783.442462] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.452392] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241692, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.476877] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241691, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.548023] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89fa10c-1b6f-4f84-852d-510f09b1faa9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.560572] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.565159] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f7a147-d96e-4ba7-bb7f-30a196e95032 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.612663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f0f372-7ddf-48f1-9283-9cda37b96ef3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.622188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d475d022-a69e-477f-877c-eaa057b09ba5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.638708] env[69994]: DEBUG nova.compute.provider_tree [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.732633] env[69994]: DEBUG nova.compute.manager [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Received event network-vif-plugged-de3e77dc-7712-4e45-b1d3-fd50595cb0f2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.733208] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] Acquiring lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.733445] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.733618] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.733836] env[69994]: DEBUG nova.compute.manager [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] No waiting events found dispatching network-vif-plugged-de3e77dc-7712-4e45-b1d3-fd50595cb0f2 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 783.734015] env[69994]: WARNING nova.compute.manager [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Received unexpected event network-vif-plugged-de3e77dc-7712-4e45-b1d3-fd50595cb0f2 for instance with vm_state building and task_state spawning. [ 783.734211] env[69994]: DEBUG nova.compute.manager [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Received event network-changed-de3e77dc-7712-4e45-b1d3-fd50595cb0f2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.734393] env[69994]: DEBUG nova.compute.manager [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Refreshing instance network info cache due to event network-changed-de3e77dc-7712-4e45-b1d3-fd50595cb0f2. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.734616] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] Acquiring lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.898863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.899521] env[69994]: DEBUG nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Instance network_info: |[{"id": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "address": "fa:16:3e:1e:ef:15", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde3e77dc-77", "ovs_interfaceid": 
"de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 783.899944] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] Acquired lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.900253] env[69994]: DEBUG nova.network.neutron [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Refreshing network info cache for port de3e77dc-7712-4e45-b1d3-fd50595cb0f2 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.904171] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:ef:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de3e77dc-7712-4e45-b1d3-fd50595cb0f2', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 783.914089] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating folder: Project (30a9ea2f804f49ec8c5c6861b507454e). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.914221] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e89f3146-4753-4293-82b1-5f35f6d6877d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.926088] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created folder: Project (30a9ea2f804f49ec8c5c6861b507454e) in parent group-v647729. [ 783.926088] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating folder: Instances. Parent ref: group-v647865. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.926213] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b33c6f92-5eb0-4836-8c54-b5f453cfad0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.935448] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created folder: Instances in parent group-v647865. 
[ 783.935726] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 783.935918] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 783.936161] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a37be1fd-66d4-4cfb-887e-9ac1ce4fef49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.962534] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.200445} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.964084] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.964340] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 783.964340] env[69994]: value = "task-3241696" [ 783.964340] env[69994]: _type = "Task" [ 783.964340] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.965107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3673af9-8360-4fc0-b516-f4d8035bfe74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.995500] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] 86e514bb-8b47-4605-bd85-55c6c9874320/86e514bb-8b47-4605-bd85-55c6c9874320.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.002849] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc017702-94f2-45b5-afb3-42216ca2238a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.018631] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241691, 'name': ReconfigVM_Task, 'duration_secs': 0.744176} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.018879] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241696, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.019182] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Reconfigured VM instance instance-0000002b to attach disk [datastore1] d4f87534-813e-4ff6-8b1f-ee23cb0b8e80/d4f87534-813e-4ff6-8b1f-ee23cb0b8e80.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.020343] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6740e842-3514-41fe-8f70-f7fd1a05f238 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.027882] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 784.027882] env[69994]: value = "task-3241697" [ 784.027882] env[69994]: _type = "Task" [ 784.027882] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.029234] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 784.029234] env[69994]: value = "task-3241698" [ 784.029234] env[69994]: _type = "Task" [ 784.029234] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.040835] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241697, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.044040] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241698, 'name': Rename_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.142814] env[69994]: DEBUG nova.scheduler.client.report [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.272556] env[69994]: DEBUG nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 784.295342] env[69994]: DEBUG nova.compute.manager [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 784.295546] env[69994]: DEBUG nova.compute.manager [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing instance network info cache due to event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 784.295777] env[69994]: DEBUG oslo_concurrency.lockutils [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] Acquiring lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.295926] env[69994]: DEBUG oslo_concurrency.lockutils [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] Acquired lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.296102] env[69994]: DEBUG nova.network.neutron [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.309910] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 784.310175] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.310330] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 784.310585] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.310800] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 784.310979] env[69994]: DEBUG nova.virt.hardware [None 
req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 784.311358] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 784.311530] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 784.311701] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 784.311864] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 784.312049] env[69994]: DEBUG nova.virt.hardware [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 784.312905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55ef8d3-9ae1-493e-9ebf-5b6ee8cb17b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.322825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e40c1b5-246c-44fb-95a8-688fff95c477 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.478788] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241696, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.546695] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241697, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.552841] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241698, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.647903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.409s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.653335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.040s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.653335] env[69994]: DEBUG nova.objects.instance [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lazy-loading 'resources' on Instance uuid ce6f9a88-faa8-442e-8b48-64979dd2d03e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.679503] env[69994]: INFO nova.scheduler.client.report [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Deleted allocations for instance 7e7953f7-ed5d-4515-9181-93d343ad772d [ 784.851768] env[69994]: DEBUG nova.network.neutron [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Updated VIF entry in instance network info cache for port de3e77dc-7712-4e45-b1d3-fd50595cb0f2. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.852174] env[69994]: DEBUG nova.network.neutron [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Updating instance_info_cache with network_info: [{"id": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "address": "fa:16:3e:1e:ef:15", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde3e77dc-77", "ovs_interfaceid": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.914159] env[69994]: DEBUG nova.network.neutron [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Successfully updated port: 2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.981641] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241696, 'name': CreateVM_Task, 'duration_secs': 0.698378} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.981834] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 784.982576] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.982749] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.983088] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 784.983360] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25a72c10-3e87-45e5-9850-a385f0d5410e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.987964] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 784.987964] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fc575b-e7d9-1b1a-ab09-739e8521b973" [ 784.987964] env[69994]: _type = "Task" [ 784.987964] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.996793] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fc575b-e7d9-1b1a-ab09-739e8521b973, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.043321] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241697, 'name': ReconfigVM_Task, 'duration_secs': 0.701918} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.044009] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Reconfigured VM instance instance-0000002c to attach disk [datastore2] 86e514bb-8b47-4605-bd85-55c6c9874320/86e514bb-8b47-4605-bd85-55c6c9874320.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 785.044645] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-354a9731-9e35-4178-8a8a-82beeb9dcfb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.049142] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241698, 'name': Rename_Task, 'duration_secs': 0.938866} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.049857] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.050109] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10af91fe-aac5-414d-9b50-d53e4928045c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.053712] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 785.053712] env[69994]: value = "task-3241699" [ 785.053712] env[69994]: _type = "Task" [ 785.053712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.058745] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 785.058745] env[69994]: value = "task-3241700" [ 785.058745] env[69994]: _type = "Task" [ 785.058745] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.067419] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241699, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.071576] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241700, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.200056] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8ad7306-023f-4618-bd7d-525ece4c7641 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "7e7953f7-ed5d-4515-9181-93d343ad772d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.284s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.229976] env[69994]: DEBUG nova.network.neutron [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updated VIF entry in instance network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 785.229976] env[69994]: DEBUG nova.network.neutron [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [{"id": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "address": "fa:16:3e:c1:68:d9", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd451c9f0-13", "ovs_interfaceid": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.356709] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f521a1b-6e12-49c9-b177-adcc9633c89d req-95a05f1b-cbcf-4225-9a2e-f5d95dc6f905 service nova] Releasing lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.417256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.417418] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.417554] env[69994]: DEBUG nova.network.neutron [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.501348] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fc575b-e7d9-1b1a-ab09-739e8521b973, 'name': SearchDatastore_Task, 'duration_secs': 0.026462} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.501650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.501894] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 785.502154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.502302] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.502482] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.502747] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6de5c22c-3660-4907-9654-9ed931bf9caa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.515424] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created directory 
with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.515706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 785.516475] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29091a51-cfa1-4569-b2bf-d598ad9828e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.524149] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 785.524149] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526c0249-8274-a9f0-5df8-1366a945c25e" [ 785.524149] env[69994]: _type = "Task" [ 785.524149] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.531788] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c0249-8274-a9f0-5df8-1366a945c25e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.567906] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241699, 'name': Rename_Task, 'duration_secs': 0.259082} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.568585] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.568838] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b4333d0-f0b7-4951-9835-d6f9d67936ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.573368] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241700, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.579821] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 785.579821] env[69994]: value = "task-3241701" [ 785.579821] env[69994]: _type = "Task" [ 785.579821] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.587278] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241701, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.733852] env[69994]: DEBUG oslo_concurrency.lockutils [req-fa352013-5244-4bca-a700-e3bde88cf9ca req-abdcfb11-011c-44db-88e3-fbabda7116e5 service nova] Releasing lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.791535] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ea4ae9-150b-4c18-8496-6c407fd25d91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.799928] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7540417-b624-4054-b72f-708334e032f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.834401] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085252a2-7f8b-471c-b932-2b2954c715b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.843136] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e83417-e484-49b0-a14e-ee6d1e727980 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.858659] env[69994]: DEBUG nova.compute.provider_tree [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.974987] env[69994]: DEBUG nova.network.neutron [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.036813] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c0249-8274-a9f0-5df8-1366a945c25e, 'name': SearchDatastore_Task, 'duration_secs': 0.03454} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.037650] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d65b97f5-0193-409c-9071-8716190ede8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.043107] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 786.043107] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f5f4d0-cf38-12d8-f97e-f8ebf7715cd5" [ 786.043107] env[69994]: _type = "Task" [ 786.043107] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.051555] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f5f4d0-cf38-12d8-f97e-f8ebf7715cd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.069925] env[69994]: DEBUG oslo_vmware.api [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241700, 'name': PowerOnVM_Task, 'duration_secs': 0.796464} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.070201] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 786.070407] env[69994]: INFO nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Took 10.34 seconds to spawn the instance on the hypervisor. 
[ 786.070617] env[69994]: DEBUG nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 786.074344] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad847fbe-9a6b-4bc3-93e2-60376c78fc9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.091465] env[69994]: DEBUG nova.compute.manager [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Received event network-vif-plugged-2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 786.091707] env[69994]: DEBUG oslo_concurrency.lockutils [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] Acquiring lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.091910] env[69994]: DEBUG oslo_concurrency.lockutils [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.092104] env[69994]: DEBUG oslo_concurrency.lockutils [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.093509] env[69994]: DEBUG nova.compute.manager [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] No waiting events found dispatching network-vif-plugged-2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 786.093509] env[69994]: WARNING nova.compute.manager [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Received unexpected event network-vif-plugged-2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 for instance with vm_state building and task_state spawning. 
[ 786.093509] env[69994]: DEBUG nova.compute.manager [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Received event network-changed-2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 786.093509] env[69994]: DEBUG nova.compute.manager [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Refreshing instance network info cache due to event network-changed-2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 786.093509] env[69994]: DEBUG oslo_concurrency.lockutils [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] Acquiring lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.098693] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241701, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.363313] env[69994]: DEBUG nova.scheduler.client.report [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.504451] env[69994]: DEBUG nova.network.neutron [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Updating instance_info_cache with network_info: [{"id": "2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "address": "fa:16:3e:52:d0:9b", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ad3bbc3-0a", "ovs_interfaceid": 
"2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.554975] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f5f4d0-cf38-12d8-f97e-f8ebf7715cd5, 'name': SearchDatastore_Task, 'duration_secs': 0.048656} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.555295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.555522] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/ed662f67-be0e-4f19-bb8a-6af39b4d348c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 786.555784] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-656cd7aa-7290-4eaa-ab45-35ffe4d55b36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.565432] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 786.565432] env[69994]: value = "task-3241703" [ 786.565432] env[69994]: _type = "Task" [ 786.565432] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.573336] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241703, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.590490] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241701, 'name': PowerOnVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.599694] env[69994]: INFO nova.compute.manager [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Took 50.07 seconds to build instance. [ 786.872694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.222s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.878543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.630s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.879650] env[69994]: INFO nova.compute.claims [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.902025] env[69994]: INFO nova.scheduler.client.report [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Deleted allocations for instance ce6f9a88-faa8-442e-8b48-64979dd2d03e [ 787.007884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.008406] env[69994]: DEBUG nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Instance network_info: |[{"id": "2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "address": "fa:16:3e:52:d0:9b", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ad3bbc3-0a", "ovs_interfaceid": 
"2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 787.008603] env[69994]: DEBUG oslo_concurrency.lockutils [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] Acquired lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.008790] env[69994]: DEBUG nova.network.neutron [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Refreshing network info cache for port 2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.010048] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:d0:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 787.019149] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 787.020846] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 787.022198] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb2d600e-38f3-48b7-97ad-845b0e6a8729 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.039137] env[69994]: DEBUG nova.compute.manager [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.039340] env[69994]: DEBUG nova.compute.manager [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing instance network info cache due to event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 787.039570] env[69994]: DEBUG oslo_concurrency.lockutils [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] Acquiring lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.039727] env[69994]: DEBUG oslo_concurrency.lockutils [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] Acquired lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.040537] env[69994]: DEBUG nova.network.neutron [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.049101] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.049101] env[69994]: value = "task-3241704" [ 787.049101] env[69994]: _type = "Task" [ 787.049101] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.059707] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241704, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.082991] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241703, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.095077] env[69994]: DEBUG oslo_vmware.api [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241701, 'name': PowerOnVM_Task, 'duration_secs': 1.12303} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.095351] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 787.095550] env[69994]: INFO nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Took 8.56 seconds to spawn the instance on the hypervisor. 
[ 787.095831] env[69994]: DEBUG nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 787.096820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233248b8-ec41-4bce-af04-34e0a6767704 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.102171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ead61415-4e9c-4484-aa55-8f715ee23abd tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.335s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.413657] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c78d3c1c-0a6c-4f60-a8a6-546ffd57861f tempest-ServerRescueTestJSONUnderV235-1147698067 tempest-ServerRescueTestJSONUnderV235-1147698067-project-member] Lock "ce6f9a88-faa8-442e-8b48-64979dd2d03e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.483s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.561221] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241704, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.571145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.571339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.571581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.571768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.571960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.574226] env[69994]: INFO nova.compute.manager [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Terminating instance [ 787.582955] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241703, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.739809} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.586969] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/ed662f67-be0e-4f19-bb8a-6af39b4d348c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.587470] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.588207] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dab5b03b-fb06-4525-964d-cbee4aa2750e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.598414] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 787.598414] env[69994]: value = "task-3241705" [ 787.598414] env[69994]: _type = "Task" [ 787.598414] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.608638] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241705, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.609022] env[69994]: DEBUG nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 787.625017] env[69994]: INFO nova.compute.manager [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Took 42.63 seconds to build instance. [ 787.806802] env[69994]: DEBUG nova.network.neutron [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updated VIF entry in instance network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.807194] env[69994]: DEBUG nova.network.neutron [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [{"id": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "address": "fa:16:3e:c1:68:d9", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd451c9f0-13", "ovs_interfaceid": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.035293] env[69994]: DEBUG nova.network.neutron [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Updated VIF entry in instance network info cache for port 2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.035671] env[69994]: DEBUG nova.network.neutron [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Updating instance_info_cache with network_info: [{"id": "2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "address": "fa:16:3e:52:d0:9b", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ad3bbc3-0a", "ovs_interfaceid": "2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.062224] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241704, 'name': CreateVM_Task, 'duration_secs': 0.677414} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.068946] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.069927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.070032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.070349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 788.070855] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a9fd143-e47c-4f2a-81be-3f770027e780 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.077893] env[69994]: DEBUG nova.compute.manager [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 788.078125] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.078476] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 788.078476] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c770ae-085c-7bb7-b3f2-3fd0e35e0496" [ 788.078476] env[69994]: _type = "Task" [ 788.078476] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.081755] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ccf89e-94c3-4346-af5a-4cec43236200 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.092122] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.095212] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92cc5c2f-e604-4f8e-bea1-2dc6417b17a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.096952] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c770ae-085c-7bb7-b3f2-3fd0e35e0496, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.109064] env[69994]: DEBUG oslo_vmware.api [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 788.109064] env[69994]: value = "task-3241707" [ 788.109064] env[69994]: _type = "Task" [ 788.109064] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.119956] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241705, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114603} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.121351] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.122695] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666fdc8b-d668-416a-bb3e-8e485cb25cf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.129834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f4bc1663-5b0c-4964-903d-a6d97fd304de tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "86e514bb-8b47-4605-bd85-55c6c9874320" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.089s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.132614] env[69994]: DEBUG oslo_vmware.api [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.160180] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/ed662f67-be0e-4f19-bb8a-6af39b4d348c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.162175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.164911] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ae795cf-f842-4b23-a30a-71d50aa778cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.188042] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 788.188042] env[69994]: value = "task-3241708" [ 788.188042] env[69994]: _type = "Task" [ 788.188042] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.198139] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241708, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.311449] env[69994]: DEBUG oslo_concurrency.lockutils [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] Releasing lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.311807] env[69994]: DEBUG nova.compute.manager [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Received event network-changed-de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.311963] env[69994]: DEBUG nova.compute.manager [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Refreshing instance network info cache due to event network-changed-de62ca8d-e627-414e-a2b0-e988e91c52d0. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 788.312109] env[69994]: DEBUG oslo_concurrency.lockutils [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] Acquiring lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.312498] env[69994]: DEBUG oslo_concurrency.lockutils [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] Acquired lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.312694] env[69994]: DEBUG nova.network.neutron [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Refreshing network info cache for port de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.402419] env[69994]: DEBUG nova.compute.manager [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 788.403738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c0ac35-3196-426d-b234-5cc5cc5adba0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.517960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd090da8-919e-4cbf-9324-15c4f2fe396d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.525505] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b7f3fe-5655-416c-b248-f660ef0f4510 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.557169] env[69994]: DEBUG oslo_concurrency.lockutils [req-93112a0b-8e9a-41fa-8f55-6c0cdca1b203 req-28c23029-a11f-4a4b-b468-fb9b61c04f59 service nova] Releasing lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.558076] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06fe0d4-a6de-4cfc-9941-3d44f2a1dc29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.565679] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78641cee-d003-431b-9129-dec425c04add {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.581064] env[69994]: DEBUG nova.compute.provider_tree [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.600850] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c770ae-085c-7bb7-b3f2-3fd0e35e0496, 'name': SearchDatastore_Task, 'duration_secs': 0.033348} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.601346] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.601698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.602457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.602457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.602696] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.603056] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25f28a53-ad22-4a52-82e7-d55d7b6e8c3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.614760] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.614960] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.616050] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-617629e5-60f4-4ce2-ad86-47f2d3057805 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.621183] env[69994]: DEBUG oslo_vmware.api [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241707, 'name': PowerOffVM_Task, 'duration_secs': 0.234734} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.621775] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 788.621943] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 788.622194] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5aa9b12c-b075-46bb-8019-0eeef8b1c8e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.624813] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 788.624813] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521f6194-53ad-f9ed-5ab9-8a78c1f90e99" [ 788.624813] env[69994]: _type = "Task" [ 788.624813] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.635235] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f6194-53ad-f9ed-5ab9-8a78c1f90e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.638827] env[69994]: DEBUG nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 788.697023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 788.697023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 788.697023] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Deleting the datastore file [datastore2] cef66a67-e3ac-40dc-a8a4-0375bd64c484 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 788.697023] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05bd23d0-7d6b-45a0-9a69-0860f6c1b320 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.706039] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241708, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.710055] env[69994]: DEBUG oslo_vmware.api [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for the task: (returnval){ [ 788.710055] env[69994]: value = "task-3241710" [ 788.710055] env[69994]: _type = "Task" [ 788.710055] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.719973] env[69994]: DEBUG oslo_vmware.api [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241710, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.917643] env[69994]: INFO nova.compute.manager [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] instance snapshotting [ 788.918364] env[69994]: DEBUG nova.objects.instance [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'flavor' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 789.085109] env[69994]: DEBUG nova.scheduler.client.report [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.136706] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f6194-53ad-f9ed-5ab9-8a78c1f90e99, 'name': SearchDatastore_Task, 'duration_secs': 0.032844} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.137543] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-117eeaa0-cbed-42e2-bb75-b163d86dd2d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.145348] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 789.145348] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526f8a2d-7482-834b-c0bd-a5e23fdddbdf" [ 789.145348] env[69994]: _type = "Task" [ 789.145348] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.159352] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526f8a2d-7482-834b-c0bd-a5e23fdddbdf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.166523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.198141] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241708, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.223322] env[69994]: DEBUG oslo_vmware.api [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Task: {'id': task-3241710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.434223} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.223582] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 789.223778] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 789.223951] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.224142] env[69994]: INFO nova.compute.manager [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Took 1.15 seconds to destroy the instance on the hypervisor. [ 789.224930] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 789.225032] env[69994]: DEBUG nova.compute.manager [-] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 789.225073] env[69994]: DEBUG nova.network.neutron [-] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.241661] env[69994]: DEBUG nova.network.neutron [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updated VIF entry in instance network info cache for port de62ca8d-e627-414e-a2b0-e988e91c52d0. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.242033] env[69994]: DEBUG nova.network.neutron [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updating instance_info_cache with network_info: [{"id": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "address": "fa:16:3e:f3:e7:97", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62ca8d-e6", "ovs_interfaceid": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.332510] env[69994]: DEBUG nova.compute.manager [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Received event network-changed-de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.333661] env[69994]: DEBUG nova.compute.manager [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Refreshing instance network info cache due to event network-changed-de62ca8d-e627-414e-a2b0-e988e91c52d0. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 789.333661] env[69994]: DEBUG oslo_concurrency.lockutils [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] Acquiring lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.425175] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a546f7ce-ed7b-412f-b3f1-f1697af90a69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.448111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995ae202-27b3-4aa4-81e5-18f4ccb88441 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.590879] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.716s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.591363] env[69994]: DEBUG nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 789.594541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.594762] env[69994]: DEBUG nova.objects.instance [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lazy-loading 'resources' on Instance uuid 53a8714c-50f7-4990-a3d9-86f8fc908d03 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 789.661551] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526f8a2d-7482-834b-c0bd-a5e23fdddbdf, 'name': SearchDatastore_Task, 'duration_secs': 0.019814} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.662026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.662245] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/b99b73e6-3348-4d5d-aa57-f01ace0bfc42.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.662463] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56457b24-8a4d-4179-a37f-dc8295d55cc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.672624] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 789.672624] env[69994]: value = "task-3241711" [ 789.672624] env[69994]: _type = "Task" [ 789.672624] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.681925] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241711, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.699670] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241708, 'name': ReconfigVM_Task, 'duration_secs': 1.012524} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.700038] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Reconfigured VM instance instance-0000002d to attach disk [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/ed662f67-be0e-4f19-bb8a-6af39b4d348c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.700880] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2ba826b-62a7-4e40-9191-5bc5b3f4086a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.707807] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 789.707807] env[69994]: value = "task-3241712" [ 789.707807] env[69994]: _type = "Task" [ 789.707807] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.720181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "0b284e71-7af2-4782-b950-4f7eac5221a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.720181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.720953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.721380] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.721689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.725154] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241712, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.725154] env[69994]: INFO nova.compute.manager [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Terminating instance [ 789.749561] env[69994]: DEBUG oslo_concurrency.lockutils [req-3310317f-0a32-429b-b05f-d6448e991fb7 req-56befad5-ca2e-46cb-8ac6-081beb28a9ea service nova] Releasing lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.751810] env[69994]: DEBUG oslo_concurrency.lockutils [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] Acquired lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.752846] env[69994]: DEBUG nova.network.neutron [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Refreshing network info cache for port de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.803216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "744fe018-d12c-44c2-98f1-c11fbfffc98e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.803216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.803216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "744fe018-d12c-44c2-98f1-c11fbfffc98e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.803216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.803533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.804999] env[69994]: INFO nova.compute.manager [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Terminating instance [ 789.962930] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 789.964340] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4a0bd0fe-94dd-4af1-aaa8-bbe8b3eea0bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.971402] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 789.971402] env[69994]: value = "task-3241713" [ 789.971402] env[69994]: _type = "Task" [ 789.971402] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.981491] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241713, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.102073] env[69994]: DEBUG nova.compute.utils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 790.104072] env[69994]: DEBUG nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 790.104317] env[69994]: DEBUG nova.network.neutron [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 790.168402] env[69994]: DEBUG nova.policy [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29861e0318bb4e5fa5d92379b063367c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1aa7929b2e0d467c99c25acd8b7e92d6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 790.182965] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241711, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.219979] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241712, 'name': Rename_Task, 'duration_secs': 0.187333} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.220607] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.220861] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1463d57-cfda-456e-99ce-7aad38eab030 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.234373] env[69994]: DEBUG nova.compute.manager [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 790.234661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.235038] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 790.235038] env[69994]: value = "task-3241715" [ 790.235038] env[69994]: _type = "Task" [ 790.235038] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.235869] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54475fda-b0c9-4ca3-9101-0b3fc5390e29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.254034] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.259621] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcf53971-ae36-4602-bb81-d2ce258f09b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.265473] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241715, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.276760] env[69994]: DEBUG oslo_vmware.api [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 790.276760] env[69994]: value = "task-3241716" [ 790.276760] env[69994]: _type = "Task" [ 790.276760] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.289849] env[69994]: DEBUG oslo_vmware.api [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241716, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.310491] env[69994]: DEBUG nova.compute.manager [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 790.310491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.311498] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7fddd5-3cd2-4be5-a818-d67fbe305fdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.323160] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.323160] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-917a2d93-3d28-4cbf-aa75-bee89663383f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.330534] env[69994]: DEBUG nova.network.neutron [-] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.340816] env[69994]: DEBUG oslo_vmware.api [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 790.340816] env[69994]: value = "task-3241717" [ 790.340816] env[69994]: _type = "Task" [ 790.340816] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.355300] env[69994]: DEBUG oslo_vmware.api [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241717, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.489623] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241713, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.566394] env[69994]: DEBUG nova.network.neutron [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Successfully created port: 03a2cce0-4737-45b4-8482-4eabd0e63386 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 790.610469] env[69994]: DEBUG nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 790.691230] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241711, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681668} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.692028] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/b99b73e6-3348-4d5d-aa57-f01ace0bfc42.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 790.692028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 790.692210] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1b12847-1a4c-4e9b-b5cc-234e7d22753b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.704175] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 790.704175] env[69994]: value = "task-3241718" [ 790.704175] env[69994]: _type = "Task" [ 790.704175] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.711957] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241718, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.750317] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241715, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.792727] env[69994]: DEBUG oslo_vmware.api [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241716, 'name': PowerOffVM_Task, 'duration_secs': 0.286493} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.792727] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 790.792727] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 790.792727] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f01fb38-7f2b-4692-bfb7-09e67fe59d83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.833922] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f16882-4bd6-450f-ad18-5dc5639fc832 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.837260] env[69994]: INFO nova.compute.manager [-] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Took 1.61 seconds to deallocate network for instance. [ 790.848489] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc5c303-f80f-4c7f-8410-3107c8debf13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.860044] env[69994]: DEBUG oslo_vmware.api [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241717, 'name': PowerOffVM_Task, 'duration_secs': 0.345898} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.889256] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 790.889723] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 790.893218] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-138ceb87-149a-4f48-89f1-590c586aabc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.896040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e38fff3-e962-49a4-8eb9-dee688d7bcbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.903545] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23948ef9-ad34-4c94-be4f-211ce75d5ca7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.920108] env[69994]: DEBUG nova.compute.provider_tree [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.982776] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241713, 'name': CreateSnapshot_Task, 'duration_secs': 0.799842} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.984264] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 790.984264] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7039336-88ec-4827-8eab-0e80e3d66d35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.024181] env[69994]: DEBUG nova.network.neutron [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updated VIF entry in instance network info cache for port de62ca8d-e627-414e-a2b0-e988e91c52d0. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 791.024181] env[69994]: DEBUG nova.network.neutron [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updating instance_info_cache with network_info: [{"id": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "address": "fa:16:3e:f3:e7:97", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde62ca8d-e6", "ovs_interfaceid": "de62ca8d-e627-414e-a2b0-e988e91c52d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.061014] env[69994]: DEBUG nova.compute.manager [req-bac36f54-da07-4dd6-95b4-55535fb002f9 req-501be0e0-7e6e-4292-8ad9-34f47baeba31 service nova] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Received event network-vif-deleted-7766d3a6-c9e9-46c7-ae9d-3e22ffcc98a0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 791.215908] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241718, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101564} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.215908] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 791.215908] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322bc452-55ce-4f91-ba07-9d1d5d5381bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.242689] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/b99b73e6-3348-4d5d-aa57-f01ace0bfc42.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 791.243058] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baf1ebfc-0f18-4461-8888-e012b5516a06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.266985] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 791.266985] env[69994]: value = "task-3241721" [ 791.266985] env[69994]: _type = "Task" [ 791.266985] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.280466] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.280738] env[69994]: DEBUG oslo_vmware.api [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241715, 'name': PowerOnVM_Task, 'duration_secs': 0.828261} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.280977] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.281192] env[69994]: INFO nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Took 9.86 seconds to spawn the instance on the hypervisor. 
[ 791.281379] env[69994]: DEBUG nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.282240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd31a750-db4e-4096-b192-247c6323d16c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.349186] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.349665] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 791.349916] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 791.350200] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Deleting the datastore file [datastore1] 0b284e71-7af2-4782-b950-4f7eac5221a4 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.350695] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8ce1f7d-1f50-47a0-b21a-7fdceb33f254 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.355819] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 791.356217] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 791.356490] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Deleting the datastore file [datastore1] 744fe018-d12c-44c2-98f1-c11fbfffc98e {{(pid=69994) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.356839] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f803167-fcdf-4f2e-9363-24846aa6de22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.361551] env[69994]: DEBUG oslo_vmware.api [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 791.361551] env[69994]: value = "task-3241722" [ 791.361551] env[69994]: _type = "Task" [ 791.361551] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.366401] env[69994]: DEBUG oslo_vmware.api [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for the task: (returnval){ [ 791.366401] env[69994]: value = "task-3241723" [ 791.366401] env[69994]: _type = "Task" [ 791.366401] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.372974] env[69994]: DEBUG oslo_vmware.api [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.379989] env[69994]: DEBUG oslo_vmware.api [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241723, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.424050] env[69994]: DEBUG nova.scheduler.client.report [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 791.505622] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 791.505944] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e5cb1725-dcb3-4580-a1be-9478f55a8e12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.515978] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 791.515978] env[69994]: value = "task-3241724" [ 791.515978] env[69994]: _type = "Task" [ 791.515978] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.523711] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241724, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.526524] env[69994]: DEBUG oslo_concurrency.lockutils [req-f96e17da-91b8-42f9-be2a-59c028296374 req-11c1ed6e-d80b-4403-bac1-36624c0a8061 service nova] Releasing lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.624077] env[69994]: DEBUG nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 791.653134] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:40:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='60367b47-c076-4b83-be63-6ff8f43248be',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-590586289',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 791.653376] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.653535] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 791.653711] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.653910] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 791.654204] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 791.654484] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 791.654687] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 791.654897] env[69994]: DEBUG nova.virt.hardware [None 
req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 791.655132] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 791.655361] env[69994]: DEBUG nova.virt.hardware [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 791.656528] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450c3556-cb6d-4c4f-955c-2295523faa2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.666648] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e2597c-9dab-41af-86ca-5c09347ffbab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.780957] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241721, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.799899] env[69994]: INFO nova.compute.manager [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Took 44.22 seconds to build instance. [ 791.867065] env[69994]: DEBUG nova.compute.manager [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 791.868453] env[69994]: DEBUG nova.compute.manager [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing instance network info cache due to event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 791.868453] env[69994]: DEBUG oslo_concurrency.lockutils [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] Acquiring lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.868453] env[69994]: DEBUG oslo_concurrency.lockutils [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] Acquired lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.868801] env[69994]: DEBUG nova.network.neutron [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.888541] env[69994]: DEBUG oslo_vmware.api [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.525501} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.892652] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 791.892846] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 791.893130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 791.893404] env[69994]: INFO nova.compute.manager [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Took 1.66 seconds to destroy the instance on the hypervisor. [ 791.893748] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 791.894476] env[69994]: DEBUG oslo_vmware.api [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241723, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.894849] env[69994]: DEBUG nova.compute.manager [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 791.895071] env[69994]: DEBUG nova.network.neutron [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 791.930717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.933204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.623s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.933495] env[69994]: DEBUG nova.objects.instance [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lazy-loading 'resources' on Instance uuid 3c2c8a40-919d-4280-b9be-f8d95b1a263e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 791.958926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "f3268fe1-768c-4d27-828a-5885ce166f90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.959182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "f3268fe1-768c-4d27-828a-5885ce166f90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.964406] env[69994]: INFO nova.scheduler.client.report [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Deleted allocations for instance 53a8714c-50f7-4990-a3d9-86f8fc908d03 [ 792.026585] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241724, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.141019] env[69994]: DEBUG nova.network.neutron [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Successfully updated port: 03a2cce0-4737-45b4-8482-4eabd0e63386 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 792.280489] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241721, 'name': ReconfigVM_Task, 'duration_secs': 0.646556} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.280781] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Reconfigured VM instance instance-0000002e to attach disk [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/b99b73e6-3348-4d5d-aa57-f01ace0bfc42.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 792.281419] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a905c3b-1ea6-4eb5-8b56-e4349ac466a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.289093] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 792.289093] env[69994]: value = "task-3241726" [ 792.289093] env[69994]: _type = "Task" [ 792.289093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.299360] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241726, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.302785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b775bf24-7eb1-4717-ac4b-75e15d0434d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.502s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.330640] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.330809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.330976] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.331181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.331351] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.333436] env[69994]: INFO nova.compute.manager [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Terminating instance [ 792.388737] env[69994]: DEBUG oslo_vmware.api [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Task: {'id': task-3241723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.535216} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.389039] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.389980] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 792.390294] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.390398] env[69994]: INFO nova.compute.manager [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Took 2.08 seconds to destroy the instance on the hypervisor. [ 792.390928] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.390928] env[69994]: DEBUG nova.compute.manager [-] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 792.390928] env[69994]: DEBUG nova.network.neutron [-] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.476973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8df7e3ad-b1ed-4f1d-932e-3ada58f4cfc2 tempest-VolumesAssistedSnapshotsTest-349175056 tempest-VolumesAssistedSnapshotsTest-349175056-project-member] Lock "53a8714c-50f7-4990-a3d9-86f8fc908d03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.037s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.530038] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241724, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.549702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "86e514bb-8b47-4605-bd85-55c6c9874320" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.549929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "86e514bb-8b47-4605-bd85-55c6c9874320" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.550138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "86e514bb-8b47-4605-bd85-55c6c9874320-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.550316] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "86e514bb-8b47-4605-bd85-55c6c9874320-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.550478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "86e514bb-8b47-4605-bd85-55c6c9874320-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.552778] env[69994]: INFO nova.compute.manager [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Terminating instance [ 792.644278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.644278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.644278] env[69994]: DEBUG nova.network.neutron [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 792.804510] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241726, 'name': Rename_Task, 'duration_secs': 0.145763} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.804838] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 792.805077] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-322f4152-9d34-465c-be49-511200764869 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.809691] env[69994]: DEBUG nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.812145] env[69994]: DEBUG nova.network.neutron [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.818639] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 792.818639] env[69994]: value = "task-3241727" [ 792.818639] env[69994]: _type = "Task" [ 792.818639] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.618564] env[69994]: DEBUG nova.compute.manager [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 793.618891] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.621266] env[69994]: DEBUG nova.compute.manager [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 793.621266] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.625527] env[69994]: INFO nova.compute.manager [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Took 1.73 seconds to deallocate network for instance. [ 793.625821] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241727, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.626659] env[69994]: WARNING oslo_vmware.common.loopingcall [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] task run outlasted interval by 0.307152 sec [ 793.632280] env[69994]: DEBUG nova.network.neutron [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updated VIF entry in instance network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.633069] env[69994]: DEBUG nova.network.neutron [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [{"id": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "address": "fa:16:3e:c1:68:d9", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd451c9f0-13", "ovs_interfaceid": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.635509] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b53c6a9-f18c-4f33-9842-40cb45c96947 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.640954] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9417b3-4684-47c3-94e8-9e55141e745a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.647665] env[69994]: DEBUG nova.network.neutron [-] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.662202] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241724, 'name': CloneVM_Task, 'duration_secs': 1.41074} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.669710] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Created linked-clone VM from snapshot [ 793.670328] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.670579] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241727, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.670843] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.672360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081a4260-816c-44d4-8b0c-dd2bb10344c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.674486] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1fbcd8d-fa03-47f4-bc54-64b88cb7a8fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.675891] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b25918c1-8c31-4888-a470-518840a664c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.680999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.687770] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Uploading image 1cda4692-a9f9-4b00-b35d-3419adcd1881 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 793.694713] env[69994]: DEBUG oslo_vmware.api [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 793.694713] env[69994]: value = "task-3241729" [ 793.694713] env[69994]: _type = "Task" [ 793.694713] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.694713] env[69994]: DEBUG oslo_vmware.api [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 793.694713] env[69994]: value = "task-3241728" [ 793.694713] env[69994]: _type = "Task" [ 793.694713] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.710039] env[69994]: DEBUG oslo_vmware.api [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241729, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.714308] env[69994]: DEBUG oslo_vmware.api [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241728, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.727024] env[69994]: DEBUG nova.network.neutron [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.732023] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 793.732023] env[69994]: value = "vm-647871" [ 793.732023] env[69994]: _type = "VirtualMachine" [ 793.732023] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 793.732975] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-84d334ec-5187-41e9-8350-3f64017375a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.744262] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease: (returnval){ [ 793.744262] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522bd533-72f5-99c9-c376-b90f8cbe8c09" [ 793.744262] env[69994]: _type = "HttpNfcLease" [ 793.744262] env[69994]: } obtained for exporting VM: (result){ [ 793.744262] env[69994]: value = "vm-647871" [ 793.744262] env[69994]: _type = "VirtualMachine" [ 793.744262] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 793.744262] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the lease: (returnval){ [ 793.744262] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522bd533-72f5-99c9-c376-b90f8cbe8c09" [ 793.744262] env[69994]: _type = "HttpNfcLease" [ 793.744262] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 793.756469] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 793.756469] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522bd533-72f5-99c9-c376-b90f8cbe8c09" [ 793.756469] env[69994]: _type = "HttpNfcLease" [ 793.756469] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 793.756830] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 793.756830] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522bd533-72f5-99c9-c376-b90f8cbe8c09" [ 793.756830] env[69994]: _type = "HttpNfcLease" [ 793.756830] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 793.757689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbdec9c-fa12-44b2-9809-201ac8798ac9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.769632] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b02670-38af-b3f7-9372-8a79c4491c9d/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 793.769889] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b02670-38af-b3f7-9372-8a79c4491c9d/disk-0.vmdk for reading. 
{{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 793.868474] env[69994]: DEBUG nova.compute.manager [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Received event network-vif-plugged-03a2cce0-4737-45b4-8482-4eabd0e63386 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 793.868873] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] Acquiring lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.869092] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.869238] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.869442] env[69994]: DEBUG nova.compute.manager [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] No waiting events found dispatching network-vif-plugged-03a2cce0-4737-45b4-8482-4eabd0e63386 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 793.869588] env[69994]: WARNING nova.compute.manager [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Received unexpected event network-vif-plugged-03a2cce0-4737-45b4-8482-4eabd0e63386 for instance with vm_state building and task_state spawning. [ 793.869729] env[69994]: DEBUG nova.compute.manager [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Received event network-changed-03a2cce0-4737-45b4-8482-4eabd0e63386 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 793.869881] env[69994]: DEBUG nova.compute.manager [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Refreshing instance network info cache due to event network-changed-03a2cce0-4737-45b4-8482-4eabd0e63386. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 793.870071] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] Acquiring lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.879012] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-87b790db-d443-4bd8-be8b-289e9d189c2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.957513] env[69994]: DEBUG nova.network.neutron [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance_info_cache with network_info: [{"id": "03a2cce0-4737-45b4-8482-4eabd0e63386", "address": "fa:16:3e:fe:86:b3", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03a2cce0-47", "ovs_interfaceid": "03a2cce0-4737-45b4-8482-4eabd0e63386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.978475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4518ad06-72a1-4104-80c1-4ee4776d95da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.985799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1c3a95-06b7-4ea5-8c04-54c29f56d65f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.019403] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd73adc-93b5-4ee0-b1db-fbde4f527558 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.027928] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e85604a-a1e2-40f9-9447-0933d1c5877a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.045386] env[69994]: DEBUG nova.compute.provider_tree [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Inventory has not changed in ProviderTree for 
provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.150024] env[69994]: DEBUG oslo_concurrency.lockutils [req-725dad4b-0c4e-4cec-91fd-280a72d60f04 req-9568287f-46d5-4b9c-84dd-940c68ff9e1d service nova] Releasing lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.153553] env[69994]: INFO nova.compute.manager [-] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Took 1.76 seconds to deallocate network for instance. [ 794.164541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.166202] env[69994]: DEBUG nova.compute.manager [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Received event network-vif-deleted-de62ca8d-e627-414e-a2b0-e988e91c52d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.166202] env[69994]: DEBUG nova.compute.manager [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.166202] env[69994]: DEBUG nova.compute.manager [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing instance network info cache due to event network-changed-d451c9f0-137b-44de-a79c-ec92c6f843bc. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 794.166202] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] Acquiring lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.166202] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] Acquired lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.166423] env[69994]: DEBUG nova.network.neutron [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Refreshing network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.167902] env[69994]: DEBUG oslo_vmware.api [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241727, 'name': PowerOnVM_Task, 'duration_secs': 0.848812} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.168736] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.170198] env[69994]: INFO nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Took 9.90 seconds to spawn the instance on the hypervisor. [ 794.170198] env[69994]: DEBUG nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.170198] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dbc749-d473-474d-9c88-4ff1a0d60860 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.207778] env[69994]: DEBUG oslo_vmware.api [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241729, 'name': PowerOffVM_Task, 'duration_secs': 0.199185} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.212293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.212293] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.212293] env[69994]: DEBUG oslo_vmware.api [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241728, 'name': PowerOffVM_Task, 'duration_secs': 0.257527} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.212293] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4766d454-de6c-463c-a1e8-e30985ec75e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.214117] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.214403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.214766] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25b3f26c-2d11-46e0-bdf1-26778e5d7120 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.280162] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.280162] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.280162] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Deleting the datastore file [datastore2] 86e514bb-8b47-4605-bd85-55c6c9874320 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.280162] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ec3aff5-4ba1-4535-bfa9-2d6946285efc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.286397] env[69994]: DEBUG oslo_vmware.api [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for the task: (returnval){ [ 794.286397] env[69994]: value = "task-3241734" [ 794.286397] env[69994]: _type = "Task" [ 794.286397] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.299507] env[69994]: DEBUG oslo_vmware.api [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241734, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.304317] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.304608] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.304748] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Deleting the datastore file [datastore1] 289cbcc2-cd8f-4c4f-9169-a897f5527de1 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.305397] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae9347ec-ed89-4eef-820f-507237de75ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.313501] env[69994]: DEBUG oslo_vmware.api [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for the task: (returnval){ [ 794.313501] env[69994]: value = "task-3241735" [ 794.313501] env[69994]: _type = "Task" [ 794.313501] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.322321] env[69994]: DEBUG oslo_vmware.api [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241735, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.428993] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "0b284e71-7af2-4782-b950-4f7eac5221a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.464274] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.466018] env[69994]: DEBUG nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Instance network_info: |[{"id": "03a2cce0-4737-45b4-8482-4eabd0e63386", "address": "fa:16:3e:fe:86:b3", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03a2cce0-47", "ovs_interfaceid": "03a2cce0-4737-45b4-8482-4eabd0e63386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 794.466018] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] Acquired lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.466233] env[69994]: DEBUG nova.network.neutron [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Refreshing network info cache for port 03a2cce0-4737-45b4-8482-4eabd0e63386 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.466576] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:86:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03a2cce0-4737-45b4-8482-4eabd0e63386', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.476368] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.476999] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.477326] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ae3e004-4754-4a46-a9d9-6aa546d1141b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.502744] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.502744] env[69994]: value = "task-3241736" [ 794.502744] env[69994]: _type = "Task" [ 794.502744] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.512791] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241736, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.550105] env[69994]: DEBUG nova.scheduler.client.report [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.670219] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.697136] env[69994]: INFO nova.compute.manager [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Took 41.31 seconds to build instance. [ 794.800420] env[69994]: DEBUG oslo_vmware.api [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Task: {'id': task-3241734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161182} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.805206] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.805700] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.805983] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.806669] env[69994]: INFO nova.compute.manager [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Took 1.19 seconds to destroy the instance on the hypervisor. [ 794.807041] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.807324] env[69994]: DEBUG nova.compute.manager [-] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 794.808342] env[69994]: DEBUG nova.network.neutron [-] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.825370] env[69994]: DEBUG oslo_vmware.api [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Task: {'id': task-3241735, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159113} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.825766] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.826325] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.827878] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.828361] env[69994]: INFO nova.compute.manager [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Took 1.21 seconds to destroy the instance on the hypervisor. [ 794.828775] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.829284] env[69994]: DEBUG nova.compute.manager [-] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 794.829538] env[69994]: DEBUG nova.network.neutron [-] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.017051] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241736, 'name': CreateVM_Task, 'duration_secs': 0.472394} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.017315] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.018061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.018330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.018777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.019046] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e0a6217-660d-4a1c-b4d3-96047e1ff440 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.023989] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 795.023989] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523e49e8-ff98-c574-0505-9407f07f6dab" [ 795.023989] env[69994]: _type = "Task" [ 795.023989] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.032664] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523e49e8-ff98-c574-0505-9407f07f6dab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.057999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.125s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.060593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.585s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.062339] env[69994]: INFO nova.compute.claims [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.093167] env[69994]: INFO nova.scheduler.client.report [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Deleted allocations for instance 3c2c8a40-919d-4280-b9be-f8d95b1a263e [ 795.200778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9884ab73-eec4-40de-8854-831fd9be8d32 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.777s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.521156] env[69994]: DEBUG nova.network.neutron [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updated VIF entry in instance network info cache for port d451c9f0-137b-44de-a79c-ec92c6f843bc. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 795.521156] env[69994]: DEBUG nova.network.neutron [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [{"id": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "address": "fa:16:3e:c1:68:d9", "network": {"id": "a40a358d-770e-4ef2-a47e-4e4a4532f844", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1348265973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b5a4565fa644a4a510beb5ba006afb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd451c9f0-13", "ovs_interfaceid": "d451c9f0-137b-44de-a79c-ec92c6f843bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.531013] env[69994]: DEBUG nova.network.neutron [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updated VIF entry in instance network info cache for port 03a2cce0-4737-45b4-8482-4eabd0e63386. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 795.531458] env[69994]: DEBUG nova.network.neutron [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance_info_cache with network_info: [{"id": "03a2cce0-4737-45b4-8482-4eabd0e63386", "address": "fa:16:3e:fe:86:b3", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03a2cce0-47", "ovs_interfaceid": "03a2cce0-4737-45b4-8482-4eabd0e63386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.540207] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523e49e8-ff98-c574-0505-9407f07f6dab, 'name': SearchDatastore_Task, 'duration_secs': 0.02001} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.540207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.540207] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.540573] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.540891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.541210] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.542375] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bcbd32e-2716-49fd-ace7-780cff611775 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.551689] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.551947] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.553268] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69555544-976f-454c-b8df-82c5a15f6d69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.562054] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 795.562054] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5231f5a9-ce78-3960-300d-fa3fb265aaed" [ 795.562054] env[69994]: _type = "Task" [ 795.562054] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.572372] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5231f5a9-ce78-3960-300d-fa3fb265aaed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.600152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e782d55-03f0-4c9a-834f-e1695ca59d05 tempest-ImagesNegativeTestJSON-1497610690 tempest-ImagesNegativeTestJSON-1497610690-project-member] Lock "3c2c8a40-919d-4280-b9be-f8d95b1a263e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.802s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.703181] env[69994]: DEBUG nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.023513] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e0af351-3236-4964-b563-7d7aa2455b4f req-f996b6d4-f09a-4fd9-951e-2337bf82e86a service nova] Releasing lock "refresh_cache-1d548f54-4ffa-4299-9212-717350558ad4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.034948] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] Releasing lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.035569] env[69994]: DEBUG nova.compute.manager [req-1c425ef5-6e31-47c6-9c4e-7a51801f3ae2 req-c6050342-48a5-4adc-8c1d-732549f593f0 service nova] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Received event network-vif-deleted-46e255ba-a2d4-4bd1-942b-f18624bd0198 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.082531] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5231f5a9-ce78-3960-300d-fa3fb265aaed, 'name': SearchDatastore_Task, 'duration_secs': 0.008727} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.083827] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b364780-5686-44a9-9e3b-adf7a58e65c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.093020] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 796.093020] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a46f2e-09a4-8c7e-b213-b9ba04744ed5" [ 796.093020] env[69994]: _type = "Task" [ 796.093020] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.100716] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a46f2e-09a4-8c7e-b213-b9ba04744ed5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.166165] env[69994]: DEBUG nova.network.neutron [-] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.232443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.248486] env[69994]: DEBUG nova.network.neutron [-] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.453418] env[69994]: DEBUG nova.compute.manager [req-129e338c-5fb1-4a81-9fb3-7544236f4fc1 req-56f8db47-5f0d-4f25-9086-feea8b59af72 service nova] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Received event network-vif-deleted-50763b02-561b-4c13-8a91-c7e639f09715 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.607780] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a46f2e-09a4-8c7e-b213-b9ba04744ed5, 'name': SearchDatastore_Task, 'duration_secs': 0.011371} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.610526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.611410] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.611920] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1aac1f9c-9dd6-493d-ba1f-485b1878a2ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.621749] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 796.621749] env[69994]: value = "task-3241738" [ 796.621749] env[69994]: _type = "Task" [ 796.621749] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.634203] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241738, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.665245] env[69994]: DEBUG nova.compute.manager [req-d101204d-1760-4fac-858b-8eadbd519bb1 req-672fada3-ce25-4f44-9640-3f1c7e2280f9 service nova] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Received event network-vif-deleted-8c75c77e-7172-418b-80e0-dc189770afca {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.669020] env[69994]: INFO nova.compute.manager [-] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Took 1.84 seconds to deallocate network for instance. 
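The records above trace the vSphere task lifecycle end to end: a task handle is returned (e.g. task-3241736 for CreateVM_Task), _poll_task reports progress, and the completion record carries a duration_secs figure (0.472s for CreateVM_Task, ~0.16s for the DeleteDatastoreFile_Task calls). The sketch below is not code from Nova or oslo.vmware; it is a hypothetical, stdlib-only helper showing one way to mine those completion records from a log like this one when comparing task timings.

```python
# Hypothetical log-mining helper (assumption: not part of Nova or oslo.vmware).
# It extracts the task name and duration_secs from "completed successfully"
# records such as CreateVM_Task (0.472s) or DeleteDatastoreFile_Task (0.161s).
import re
from collections import defaultdict

RECORD = re.compile(
    r"Task: \{'id': (?P<task_id>[^,]+), 'name': (?P<name>\w+)"
    r"(?:, 'duration_secs': (?P<secs>[\d.]+))?\} completed successfully"
)

def task_durations(log_text: str) -> dict[str, list[float]]:
    """Group completed-task durations (seconds) by vSphere task name."""
    durations: dict[str, list[float]] = defaultdict(list)
    for match in RECORD.finditer(log_text):
        if match.group("secs"):  # skip records that omit duration_secs
            durations[match.group("name")].append(float(match.group("secs")))
    return dict(durations)

if __name__ == "__main__":
    sample = (
        "Task: {'id': task-3241736, 'name': CreateVM_Task, "
        "'duration_secs': 0.472394} completed successfully."
    )
    print(task_durations(sample))  # {'CreateVM_Task': [0.472394]}
```

Run against the full log, a helper like this groups the CreateVM_Task, CopyVirtualDisk_Task, and ExtendVirtualDisk_Task timings that appear in the surrounding records; the parsing assumes only the record layout visible above.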
[ 796.749154] env[69994]: INFO nova.compute.manager [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Rescuing [ 796.749583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.749583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.749746] env[69994]: DEBUG nova.network.neutron [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.751252] env[69994]: INFO nova.compute.manager [-] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Took 1.94 seconds to deallocate network for instance. [ 796.766845] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7fc395-3af5-4c88-b6db-2245911f84c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.778503] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da44b271-3df7-4da4-adff-036fcca64263 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.787626] env[69994]: DEBUG nova.objects.instance [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lazy-loading 'flavor' on Instance uuid 15d17772-ac57-49a3-b261-bf49b902f658 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 796.820669] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe51011b-74c0-41e7-8cd4-d5d90c719760 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.829835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.830777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquired lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.840271] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f84f619-a370-4c2c-ba6e-4e93644bcc63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.857322] env[69994]: DEBUG nova.compute.provider_tree [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.132074] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241738, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500866} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.132363] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.134264] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.134264] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ead1e994-fb04-42fb-845d-81a89b7eb18a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.141392] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 797.141392] env[69994]: value = "task-3241739" [ 797.141392] env[69994]: _type = "Task" [ 797.141392] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.152333] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241739, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.175864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.257767] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.260393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.260689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.262091] env[69994]: DEBUG nova.compute.manager [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.262091] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bba2a14-41ca-4819-b79e-5e100bd0c5c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.269897] env[69994]: DEBUG nova.compute.manager [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 797.270570] env[69994]: DEBUG nova.objects.instance [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lazy-loading 'flavor' on Instance uuid 4dbf53e0-caa1-41f4-8376-dfba8d8567cd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 797.362120] env[69994]: DEBUG nova.scheduler.client.report [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 
92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.491921] env[69994]: DEBUG nova.network.neutron [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Updating instance_info_cache with network_info: [{"id": "2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "address": "fa:16:3e:52:d0:9b", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ad3bbc3-0a", "ovs_interfaceid": "2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.651569] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241739, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100438} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.651702] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.652450] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4886d9-d8ac-42cd-bc0a-d17926ee0fe7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.674900] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.675243] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df9dadec-782b-4ff1-aa62-c78acd0e4909 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.697017] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 797.697017] env[69994]: value = "task-3241740" [ 797.697017] env[69994]: _type = "Task" [ 797.697017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.705608] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241740, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.762213] env[69994]: DEBUG nova.network.neutron [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.875020] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.812s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.875020] env[69994]: DEBUG nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 797.876918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.528s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.877305] env[69994]: DEBUG nova.objects.instance [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'resources' on Instance uuid 5f672fd4-b96f-4506-aa1e-96692a00cb43 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.000197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-b99b73e6-3348-4d5d-aa57-f01ace0bfc42" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.210285] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241740, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.281182] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 798.281182] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13e50b7e-1f5f-4731-933e-465cd2335e0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.286757] env[69994]: DEBUG oslo_vmware.api [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 798.286757] env[69994]: value = "task-3241741" [ 798.286757] env[69994]: _type = "Task" [ 798.286757] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.299544] env[69994]: DEBUG oslo_vmware.api [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241741, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.384022] env[69994]: DEBUG nova.compute.utils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 798.390287] env[69994]: DEBUG nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 798.393021] env[69994]: DEBUG nova.network.neutron [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 798.463423] env[69994]: DEBUG nova.policy [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08a2b92b6c0141a6a7e301e064032289', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38d5a89ed7c248c3be506ef12caf5f1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 798.710893] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241740, 'name': ReconfigVM_Task, 'duration_secs': 0.656012} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.711257] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfigured VM instance instance-0000002f to attach disk [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.714703] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-882b1f96-e5b8-4e0c-9896-ea0d71b4bdf7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.725551] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 798.725551] env[69994]: value = "task-3241742" [ 798.725551] env[69994]: _type = "Task" [ 798.725551] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.738533] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241742, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.799798] env[69994]: DEBUG oslo_vmware.api [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241741, 'name': PowerOffVM_Task, 'duration_secs': 0.312269} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.799798] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 798.800223] env[69994]: DEBUG nova.compute.manager [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.801782] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366c1149-3a1e-4058-b151-dcd1f395af89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.894561] env[69994]: DEBUG nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 799.086834] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b398fdb2-7a3a-41cf-8555-bd22feabbd7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.095881] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941c3640-79a7-4bcf-bd4b-6f212edf08c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.136373] env[69994]: DEBUG nova.network.neutron [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Successfully created port: f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.139017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40be6988-8bd4-48bf-b1f6-6e2fc29139c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.148976] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58b71ed-33b4-4bd0-8d78-be6b7fd64ae3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.164651] env[69994]: DEBUG nova.compute.provider_tree [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.171422] env[69994]: DEBUG nova.network.neutron [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.239807] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241742, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.309387] env[69994]: DEBUG nova.compute.manager [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Received event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.309526] env[69994]: DEBUG nova.compute.manager [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing instance network info cache due to event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 799.309946] env[69994]: DEBUG oslo_concurrency.lockutils [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] Acquiring lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.319428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f10cc18-32ef-4efd-83f8-dac56a6e1a76 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.548518] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.549521] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06bb6840-9d06-4e39-8865-fd6e8d1f0c58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.557329] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 799.557329] env[69994]: value = "task-3241743" [ 799.557329] env[69994]: _type = "Task" [ 799.557329] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.565608] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241743, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.675372] env[69994]: DEBUG nova.scheduler.client.report [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 799.678754] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Releasing lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.679804] env[69994]: DEBUG nova.compute.manager [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Inject network info {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 799.679867] env[69994]: DEBUG nova.compute.manager [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] network_info to inject: |[{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 799.685531] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] 
[instance: 15d17772-ac57-49a3-b261-bf49b902f658] Reconfiguring VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 799.686304] env[69994]: DEBUG oslo_concurrency.lockutils [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] Acquired lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.686458] env[69994]: DEBUG nova.network.neutron [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 799.687733] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e465c77-115f-4f08-81cd-65c64762d911 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.709470] env[69994]: DEBUG oslo_vmware.api [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 799.709470] env[69994]: value = "task-3241744" [ 799.709470] env[69994]: _type = "Task" [ 799.709470] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.718791] env[69994]: DEBUG oslo_vmware.api [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241744, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.739668] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241742, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.909539] env[69994]: DEBUG nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 799.938238] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 799.938544] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.938732] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 799.938929] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.939448] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 799.939774] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 799.939850] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 799.940043] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 799.940285] env[69994]: DEBUG nova.virt.hardware [None 
req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 799.940910] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 799.941090] env[69994]: DEBUG nova.virt.hardware [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 799.942177] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2502447f-b003-4975-9e82-dd3aa9f9c235 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.951040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac78482a-3449-4545-aa69-38d94b633988 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.067587] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.187360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.310s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.191564] env[69994]: DEBUG nova.network.neutron [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updated VIF entry in instance network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 800.191980] env[69994]: DEBUG nova.network.neutron [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.193429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.837s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.195182] env[69994]: INFO nova.compute.claims [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.224800] env[69994]: DEBUG oslo_vmware.api [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241744, 'name': ReconfigVM_Task, 'duration_secs': 0.239789} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.225278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66238c41-6d22-40d5-a678-ac95eb0180b0 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Reconfigured VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 800.234713] env[69994]: INFO nova.scheduler.client.report [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted allocations for instance 5f672fd4-b96f-4506-aa1e-96692a00cb43 [ 800.242775] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241742, 'name': Rename_Task, 'duration_secs': 1.236632} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.243376] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 800.243662] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-940ea33f-6784-4945-9df5-5c8192814cbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.250959] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 800.250959] env[69994]: value = "task-3241745" [ 800.250959] env[69994]: _type = "Task" [ 800.250959] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.260477] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241745, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.568638] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241743, 'name': PowerOffVM_Task, 'duration_secs': 0.697965} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.568927] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 800.569885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fd88e3-3b40-40bb-a77a-5d4980aa0478 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.590185] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1071030-5a1a-4f9d-8317-3a9b3252e01a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.634219] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.634548] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c987f762-a2b1-4f4c-b967-dbc4b270e7cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.643890] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 800.643890] env[69994]: value = "task-3241746" [ 800.643890] env[69994]: _type = "Task" [ 800.643890] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.656040] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 800.657478] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.657959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.658780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.659037] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.660300] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-feb80f33-6eb2-4f04-9d81-1d3dd9c9b1ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.671428] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.671699] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 800.672480] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e4c7580-86b7-4921-9862-fafcf7b997ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.679256] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 800.679256] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fd0f9b-3fc8-69e1-9749-1f4d0e7b6085" [ 800.679256] env[69994]: _type = "Task" [ 800.679256] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.689681] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fd0f9b-3fc8-69e1-9749-1f4d0e7b6085, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.702760] env[69994]: DEBUG oslo_concurrency.lockutils [req-73784484-2f08-4cac-af3c-3e888ac032d8 req-0eb55a6e-6982-4cd7-bbaa-132f0b410a8c service nova] Releasing lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.746914] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc8c2cd7-fe67-42b1-9650-83c2031b28a7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "5f672fd4-b96f-4506-aa1e-96692a00cb43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.031s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.762495] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241745, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.928433] env[69994]: DEBUG nova.objects.instance [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lazy-loading 'flavor' on Instance uuid 15d17772-ac57-49a3-b261-bf49b902f658 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 800.952485] env[69994]: DEBUG nova.network.neutron [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Successfully updated port: f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.062892] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.063163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.094551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Acquiring lock "25a64898-568e-4095-aace-f8a564cdf916" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.095254] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "25a64898-568e-4095-aace-f8a564cdf916" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.191413] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fd0f9b-3fc8-69e1-9749-1f4d0e7b6085, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.192687] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-029963d6-df3c-4c32-ac6a-72590a7faa66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.197910] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 801.197910] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52846e13-a179-0a08-8369-c71ecff8f67a" [ 801.197910] env[69994]: _type = "Task" [ 801.197910] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.211374] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52846e13-a179-0a08-8369-c71ecff8f67a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.274621] env[69994]: DEBUG oslo_vmware.api [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241745, 'name': PowerOnVM_Task, 'duration_secs': 0.667007} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.278209] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.278463] env[69994]: INFO nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Took 9.66 seconds to spawn the instance on the hypervisor. 
[ 801.278656] env[69994]: DEBUG nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.279756] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95ee16d-a8f3-44be-9ef1-a51f832ce70a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.438142] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.438439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquired lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.456141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.456141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.456141] env[69994]: DEBUG nova.network.neutron [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.568218] env[69994]: DEBUG nova.compute.utils [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 801.691303] env[69994]: DEBUG nova.compute.manager [req-773bfc1f-5cd2-48ce-9d0e-5cb37652ce77 req-a7af820c-9d95-4350-8cb8-947664e998df service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Received event network-vif-plugged-f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 801.691303] env[69994]: DEBUG oslo_concurrency.lockutils [req-773bfc1f-5cd2-48ce-9d0e-5cb37652ce77 req-a7af820c-9d95-4350-8cb8-947664e998df service nova] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.691303] env[69994]: DEBUG oslo_concurrency.lockutils [req-773bfc1f-5cd2-48ce-9d0e-5cb37652ce77 req-a7af820c-9d95-4350-8cb8-947664e998df service nova] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.691303] env[69994]: DEBUG oslo_concurrency.lockutils [req-773bfc1f-5cd2-48ce-9d0e-5cb37652ce77 req-a7af820c-9d95-4350-8cb8-947664e998df service nova] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.691303] env[69994]: DEBUG nova.compute.manager [req-773bfc1f-5cd2-48ce-9d0e-5cb37652ce77 req-a7af820c-9d95-4350-8cb8-947664e998df service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] No waiting events found dispatching network-vif-plugged-f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 801.691303] env[69994]: WARNING nova.compute.manager [req-773bfc1f-5cd2-48ce-9d0e-5cb37652ce77 req-a7af820c-9d95-4350-8cb8-947664e998df service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Received unexpected event network-vif-plugged-f7d5e758-a993-4a15-8bba-a695f99a96f4 for instance with vm_state building and task_state spawning. [ 801.711503] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52846e13-a179-0a08-8369-c71ecff8f67a, 'name': SearchDatastore_Task, 'duration_secs': 0.010301} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.711922] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.712305] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. 
{{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 801.715602] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b9ec0f6-c5d7-47d7-9054-80b403836603 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.725761] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 801.725761] env[69994]: value = "task-3241747" [ 801.725761] env[69994]: _type = "Task" [ 801.725761] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.732954] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.803806] env[69994]: INFO nova.compute.manager [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Took 43.58 seconds to build instance. [ 801.884375] env[69994]: DEBUG nova.network.neutron [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.899421] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adb71b6-19f1-43d6-b15f-488edc70fbe8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.909315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c5a16d-ac9e-41ff-9ed3-ccd1f55816f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.945689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ac7e0e-bf9f-480d-9bfb-9ed5c3d0af1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.953978] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ad37fb-55aa-4545-8425-d742304ea4f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.971486] env[69994]: DEBUG nova.compute.provider_tree [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.997096] env[69994]: DEBUG nova.network.neutron [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 
eff21ec5-a51d-4004-9edf-1891f706fe9c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.020144] env[69994]: DEBUG nova.objects.instance [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lazy-loading 'flavor' on Instance uuid 4dbf53e0-caa1-41f4-8376-dfba8d8567cd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.071319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.172617] env[69994]: DEBUG nova.network.neutron [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.237897] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241747, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.306502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08301572-d2d1-4287-ae0b-a7e4b5bdd146 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.124s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.475394] env[69994]: DEBUG nova.scheduler.client.report [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 802.512551] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b02670-38af-b3f7-9372-8a79c4491c9d/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 802.513730] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c86da02-4a80-4a53-817d-fb11b74c2a1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.525268] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b02670-38af-b3f7-9372-8a79c4491c9d/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 802.525474] env[69994]: ERROR oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b02670-38af-b3f7-9372-8a79c4491c9d/disk-0.vmdk due to incomplete transfer. 
[ 802.525707] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fbfaf95a-887c-4371-b845-c694f2187bb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.527607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.527733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquired lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.527955] env[69994]: DEBUG nova.network.neutron [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.528230] env[69994]: DEBUG nova.objects.instance [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lazy-loading 'info_cache' on Instance uuid 4dbf53e0-caa1-41f4-8376-dfba8d8567cd {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.537112] env[69994]: DEBUG oslo_vmware.rw_handles [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b02670-38af-b3f7-9372-8a79c4491c9d/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 802.537559] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Uploaded image 1cda4692-a9f9-4b00-b35d-3419adcd1881 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 802.539594] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 802.540137] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ce5f949b-1127-4503-98e1-2420e6507791 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.547028] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 802.547028] env[69994]: value = "task-3241748" [ 802.547028] env[69994]: _type = "Task" [ 802.547028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.556890] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241748, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.676240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.676240] env[69994]: DEBUG nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Instance network_info: |[{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 802.676906] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:92:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7d5e758-a993-4a15-8bba-a695f99a96f4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.685696] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Creating folder: Project (38d5a89ed7c248c3be506ef12caf5f1e). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.689047] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4d907ff-e305-4698-a464-97434b9dcea2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.701128] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Created folder: Project (38d5a89ed7c248c3be506ef12caf5f1e) in parent group-v647729. [ 802.701337] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Creating folder: Instances. Parent ref: group-v647873. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.701612] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fe6dc02-ad69-4516-bc74-67d2bd2ae955 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.711087] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Created folder: Instances in parent group-v647873. [ 802.711347] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 802.711569] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.711780] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fa4f65b-519d-4ac9-81f2-d8631817b432 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.739327] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527645} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.740810] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. [ 802.741083] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.741083] env[69994]: value = "task-3241751" [ 802.741083] env[69994]: _type = "Task" [ 802.741083] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.741762] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f4ba4d-8bb5-49ad-9c00-319504f54cbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.775488] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.776934] env[69994]: DEBUG nova.network.neutron [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.778339] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9eba34e-3a1b-44a3-9b5e-0be891d4a0b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.797705] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 802.797705] env[69994]: value = "task-3241752" [ 802.797705] env[69994]: _type = "Task" [ 802.797705] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.806426] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241752, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.808918] env[69994]: DEBUG nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.982917] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.789s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.982917] env[69994]: DEBUG nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 802.985898] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.776s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.986189] env[69994]: DEBUG nova.objects.instance [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lazy-loading 'resources' on Instance uuid 153f0ead-6e2f-4077-b86a-00d3a1114fed {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.032320] env[69994]: DEBUG nova.objects.base [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Object Instance<4dbf53e0-caa1-41f4-8376-dfba8d8567cd> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 803.058381] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241748, 'name': Destroy_Task, 'duration_secs': 0.426209} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.058381] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Destroyed the VM [ 803.058381] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 803.058882] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2474b143-4f84-4393-ab5c-c248ecebc09a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.066961] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 803.066961] env[69994]: value = "task-3241753" [ 803.066961] env[69994]: _type = "Task" [ 803.066961] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.080436] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241753, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.169875] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.170307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.170568] env[69994]: INFO nova.compute.manager [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Attaching volume 7e4a5305-889c-4f6f-ae22-6686decd4bae to /dev/sdb [ 803.214334] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3147e17-29a9-443a-825b-088728ae5053 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.221933] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920f015b-02a6-41ad-936b-f52f12982fad {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.235893] env[69994]: DEBUG nova.virt.block_device [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating existing volume attachment record: b6752619-e211-4e13-9b7b-86f6c05f7e74 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 803.254448] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241751, 'name': CreateVM_Task, 'duration_secs': 0.395061} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.254785] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.255537] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.255537] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.256855] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 803.256855] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67fcca45-f374-460f-ba51-cfea86bf9074 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.261761] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 803.261761] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52151c00-299d-d17d-9e90-2aa63070b231" [ 803.261761] env[69994]: _type = "Task" [ 803.261761] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.270802] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52151c00-299d-d17d-9e90-2aa63070b231, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.293153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Releasing lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.294664] env[69994]: DEBUG nova.compute.manager [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Inject network info {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 803.294664] env[69994]: DEBUG nova.compute.manager [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] network_info to inject: |[{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 803.299722] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Reconfiguring VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 803.302379] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6370853-742a-445c-8486-9994f391fe62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.312992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "0b975ce0-40a4-48a9-a046-66227636d496" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.315211] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.336290] env[69994]: DEBUG oslo_vmware.api [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 803.336290] env[69994]: value = "task-3241754" [ 803.336290] env[69994]: _type = "Task" [ 803.336290] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.339682] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241752, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.339864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.348332] env[69994]: DEBUG oslo_vmware.api [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.490355] env[69994]: DEBUG nova.compute.utils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 803.497413] env[69994]: DEBUG nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.497413] env[69994]: DEBUG nova.network.neutron [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.562144] env[69994]: DEBUG nova.policy [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3148db6e9024ea8a71fe85dfa96aca5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b37c974d2c01403a8e31985dd3e50367', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.575600] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241753, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.775803] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52151c00-299d-d17d-9e90-2aa63070b231, 'name': SearchDatastore_Task, 'duration_secs': 0.008747} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.778535] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.778786] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 803.779092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.779197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.779379] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 803.782985] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee7915c6-3ef7-4f3f-b10b-4f953083f4bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.793925] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 803.794123] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 803.798339] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02fc2ce2-cfff-4282-8486-83592d8648eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.808317] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 803.808317] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5229ee9f-a3d3-c7b4-739e-c239868d764a" [ 803.808317] env[69994]: _type = "Task" [ 803.808317] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.815343] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241752, 'name': ReconfigVM_Task, 'duration_secs': 0.610895} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.816017] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Reconfigured VM instance instance-0000002e to attach disk [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.817218] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb34b0cb-b6ab-4d0c-b4f8-dc5777987297 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.826469] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5229ee9f-a3d3-c7b4-739e-c239868d764a, 'name': SearchDatastore_Task, 'duration_secs': 0.011627} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.832660] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d9d26e-da09-4f94-8155-1d41c064d4ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.864549] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d03275fc-990b-44ff-ba62-de790a0ecae2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.876543] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 803.876543] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52232220-484c-3f32-73c8-622273bb2468" [ 803.876543] env[69994]: _type = "Task" [ 803.876543] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.884311] env[69994]: DEBUG oslo_vmware.api [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241754, 'name': ReconfigVM_Task, 'duration_secs': 0.148795} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.885120] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c76cb841-10c2-466f-b79d-edf72cae0935 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Reconfigured VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 803.887193] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 803.887193] env[69994]: value = "task-3241758" [ 803.887193] env[69994]: _type = "Task" [ 803.887193] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.891586] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52232220-484c-3f32-73c8-622273bb2468, 'name': SearchDatastore_Task, 'duration_secs': 0.008935} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.895056] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.895345] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] eff21ec5-a51d-4004-9edf-1891f706fe9c/eff21ec5-a51d-4004-9edf-1891f706fe9c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.898240] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9cc25d4-8b38-4ec2-9d79-d156c96ff7e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.900958] env[69994]: DEBUG nova.network.neutron [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Successfully created port: ef23e47f-0bc3-4254-bf4a-51e407cd43b7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.909419] env[69994]: DEBUG oslo_vmware.api [None 
req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.911039] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 803.911039] env[69994]: value = "task-3241759" [ 803.911039] env[69994]: _type = "Task" [ 803.911039] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.922279] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241759, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.931110] env[69994]: DEBUG nova.compute.manager [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Received event network-changed-f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 803.931110] env[69994]: DEBUG nova.compute.manager [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Refreshing instance network info cache due to event network-changed-f7d5e758-a993-4a15-8bba-a695f99a96f4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 803.931360] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.931774] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.931774] env[69994]: DEBUG nova.network.neutron [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Refreshing network info cache for port f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.965317] env[69994]: DEBUG nova.network.neutron [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Updating instance_info_cache with network_info: [{"id": "064c1f51-da25-4b26-a357-69f406a06504", "address": "fa:16:3e:e9:4b:41", "network": {"id": "53e3917f-ea44-4917-8b99-db03730593c4", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-881972712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7919e01669a04af68d70ddff8fea2cd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap064c1f51-da", "ovs_interfaceid": "064c1f51-da25-4b26-a357-69f406a06504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.997150] env[69994]: DEBUG nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 804.079812] env[69994]: DEBUG oslo_vmware.api [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241753, 'name': RemoveSnapshot_Task, 'duration_secs': 0.623801} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.080107] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 804.080337] env[69994]: INFO nova.compute.manager [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Took 14.66 seconds to snapshot the instance on the hypervisor. [ 804.224732] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccce28e-337a-4f1d-9390-d4f8381c7816 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.235269] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c677d8-d336-483e-9892-0d9852c64264 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.277749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a6025b-fe54-4c5f-9885-b88c1fb4468a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.288060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9e8a6c-a916-4e65-97a4-8d8754069ddb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.305686] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "15d17772-ac57-49a3-b261-bf49b902f658" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.305947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "15d17772-ac57-49a3-b261-bf49b902f658" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.306250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "15d17772-ac57-49a3-b261-bf49b902f658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.306440] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "15d17772-ac57-49a3-b261-bf49b902f658-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.306664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "15d17772-ac57-49a3-b261-bf49b902f658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.308746] env[69994]: DEBUG nova.compute.provider_tree [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.310942] env[69994]: INFO nova.compute.manager [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Terminating instance [ 804.402039] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241758, 'name': ReconfigVM_Task, 'duration_secs': 0.239258} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.402329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.402727] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da908cc5-028b-434f-8f4f-86351dd7ace4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.410712] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 804.410712] env[69994]: value = "task-3241760" [ 804.410712] env[69994]: _type = "Task" [ 804.410712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.420993] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241760, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.424425] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241759, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490581} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.424736] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] eff21ec5-a51d-4004-9edf-1891f706fe9c/eff21ec5-a51d-4004-9edf-1891f706fe9c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 804.424967] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 804.425272] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3590cf4d-ef6a-4312-9f7a-a4aedd1c28d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.431500] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 804.431500] env[69994]: value = "task-3241761" [ 804.431500] env[69994]: _type = "Task" [ 804.431500] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.445254] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241761, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.467965] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Releasing lock "refresh_cache-4dbf53e0-caa1-41f4-8376-dfba8d8567cd" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.638642] env[69994]: DEBUG nova.compute.manager [None req-8322906b-9afb-4fde-9da5-90e0482828ab tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Found 1 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 804.711309] env[69994]: DEBUG nova.network.neutron [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updated VIF entry in instance network info cache for port f7d5e758-a993-4a15-8bba-a695f99a96f4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.711309] env[69994]: DEBUG nova.network.neutron [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.814733] env[69994]: DEBUG nova.scheduler.client.report [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 804.820612] env[69994]: DEBUG nova.compute.manager [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 804.820827] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.821755] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be08173-d4a4-4e86-b78d-5a3638e55f53 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.838171] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.838642] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98e78b07-525c-4196-936d-f14e8c2e9f9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.849518] env[69994]: DEBUG oslo_vmware.api [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 804.849518] env[69994]: value = "task-3241762" [ 804.849518] env[69994]: _type = "Task" [ 804.849518] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.865620] env[69994]: DEBUG oslo_vmware.api [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.922607] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241760, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.943373] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067727} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.943373] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.944370] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ff5262-a90f-4955-9406-bc7803812744 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.969866] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] eff21ec5-a51d-4004-9edf-1891f706fe9c/eff21ec5-a51d-4004-9edf-1891f706fe9c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.971726] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f6e4ee1-9476-4e01-8acd-4f634fdcd335 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.997524] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 804.997524] env[69994]: value = "task-3241763" [ 804.997524] env[69994]: _type = "Task" [ 804.997524] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.009217] env[69994]: DEBUG nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 805.011618] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241763, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.037489] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.037750] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.037909] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.038101] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.038251] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.038426] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.038668] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.038838] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
805.039018] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.039296] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.039610] env[69994]: DEBUG nova.virt.hardware [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.040840] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3283734e-3111-4e02-98ad-91cc69855c77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.049975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a4d816-440b-4c14-a4fd-f4170ceb1256 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.214576] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.214831] env[69994]: DEBUG nova.compute.manager [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Received event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.215031] env[69994]: DEBUG nova.compute.manager [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing instance network info cache due to event network-changed-239a38e8-39e8-487b-ba99-cf85c99d41f1. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 805.215255] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] Acquiring lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.216015] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] Acquired lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.216015] env[69994]: DEBUG nova.network.neutron [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Refreshing network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 805.329942] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.344s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.332473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.390s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.334391] env[69994]: INFO nova.compute.claims [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.363694] env[69994]: DEBUG oslo_vmware.api [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241762, 'name': PowerOffVM_Task, 'duration_secs': 0.215621} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.364365] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 805.364365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 805.365057] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bca2280a-5532-4e36-ae57-9260f9c54208 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.394658] env[69994]: INFO nova.scheduler.client.report [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Deleted allocations for instance 153f0ead-6e2f-4077-b86a-00d3a1114fed [ 805.425026] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241760, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.450680] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 805.453024] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 805.453024] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Deleting the datastore file [datastore1] 15d17772-ac57-49a3-b261-bf49b902f658 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 805.453024] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c440ed8-fa60-4d8b-920c-d3f40cb90c56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.461271] env[69994]: DEBUG oslo_vmware.api [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for the task: (returnval){ [ 805.461271] env[69994]: value = "task-3241765" [ 805.461271] env[69994]: _type = "Task" [ 805.461271] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.473432] env[69994]: DEBUG oslo_vmware.api [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.489415] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 805.489817] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f17f221-6fac-4593-807e-2a94ca255e38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.498822] env[69994]: DEBUG oslo_vmware.api [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 805.498822] env[69994]: value = "task-3241766" [ 805.498822] env[69994]: _type = "Task" [ 805.498822] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.533074] env[69994]: DEBUG oslo_vmware.api [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241766, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.533074] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241763, 'name': ReconfigVM_Task, 'duration_secs': 0.33328} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.533074] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Reconfigured VM instance instance-00000030 to attach disk [datastore2] eff21ec5-a51d-4004-9edf-1891f706fe9c/eff21ec5-a51d-4004-9edf-1891f706fe9c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.533074] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbf16c05-29f8-4a62-8a2b-861bbb3ec9c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.543224] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 805.543224] env[69994]: value = "task-3241767" [ 805.543224] env[69994]: _type = "Task" [ 805.543224] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.563375] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241767, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.749154] env[69994]: DEBUG nova.network.neutron [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Successfully updated port: ef23e47f-0bc3-4254-bf4a-51e407cd43b7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.885502] env[69994]: DEBUG nova.compute.manager [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 805.906181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12910f70-6680-40e9-b3de-545e4f2e914a tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "153f0ead-6e2f-4077-b86a-00d3a1114fed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.158s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.932247] env[69994]: DEBUG oslo_vmware.api [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241760, 'name': PowerOnVM_Task, 'duration_secs': 1.092555} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.933222] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.937866] env[69994]: DEBUG nova.compute.manager [None req-d2018e94-1080-437a-b34c-866fa3d4473d tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.937866] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561bcaa6-dc19-4c31-a25f-535ed8e3f1fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.972384] env[69994]: DEBUG oslo_vmware.api [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Task: {'id': task-3241765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225585} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.972678] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 805.972859] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 805.973047] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.973233] env[69994]: INFO nova.compute.manager [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Took 1.15 seconds to destroy the instance on the hypervisor. [ 805.973469] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 805.973690] env[69994]: DEBUG nova.compute.manager [-] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 805.973801] env[69994]: DEBUG nova.network.neutron [-] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.021043] env[69994]: DEBUG oslo_vmware.api [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241766, 'name': PowerOnVM_Task, 'duration_secs': 0.466502} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.021372] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 806.025021] env[69994]: DEBUG nova.compute.manager [None req-af88643a-585c-4f88-9dea-36edfb9b6c34 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 806.025021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f9bb13-5741-4a0d-833a-2f4d939803b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.059643] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241767, 'name': Rename_Task, 'duration_secs': 0.197023} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.060684] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 806.060974] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-671da6a6-773a-42ca-9921-096dd0935d8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.069940] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 806.069940] env[69994]: value = "task-3241769" [ 806.069940] env[69994]: _type = "Task" [ 806.069940] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.079858] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241769, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.145567] env[69994]: DEBUG nova.network.neutron [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updated VIF entry in instance network info cache for port 239a38e8-39e8-487b-ba99-cf85c99d41f1. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 806.145916] env[69994]: DEBUG nova.network.neutron [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [{"id": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "address": "fa:16:3e:67:40:01", "network": {"id": "f408d322-470b-4fe5-9b10-67ef413491d1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1757029696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5543f4937e604cc189cc63c178705112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap239a38e8-39", "ovs_interfaceid": "239a38e8-39e8-487b-ba99-cf85c99d41f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.201112] env[69994]: DEBUG nova.compute.manager [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 806.201872] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff648d48-5da7-4bae-9261-3763327a2aed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.251676] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "refresh_cache-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.251883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquired lock "refresh_cache-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.252458] env[69994]: DEBUG nova.network.neutron [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.326671] env[69994]: DEBUG nova.compute.manager [req-8fea2742-46b9-47e9-b105-cda251686149 
req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Received event network-vif-plugged-ef23e47f-0bc3-4254-bf4a-51e407cd43b7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.326967] env[69994]: DEBUG oslo_concurrency.lockutils [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] Acquiring lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.327120] env[69994]: DEBUG oslo_concurrency.lockutils [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.327475] env[69994]: DEBUG oslo_concurrency.lockutils [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.327475] env[69994]: DEBUG nova.compute.manager [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] No waiting events found dispatching network-vif-plugged-ef23e47f-0bc3-4254-bf4a-51e407cd43b7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 806.327598] env[69994]: WARNING nova.compute.manager [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Received unexpected event network-vif-plugged-ef23e47f-0bc3-4254-bf4a-51e407cd43b7 for instance with vm_state building and task_state spawning. [ 806.327753] env[69994]: DEBUG nova.compute.manager [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Received event network-changed-ef23e47f-0bc3-4254-bf4a-51e407cd43b7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.328143] env[69994]: DEBUG nova.compute.manager [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Refreshing instance network info cache due to event network-changed-ef23e47f-0bc3-4254-bf4a-51e407cd43b7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 806.328317] env[69994]: DEBUG oslo_concurrency.lockutils [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] Acquiring lock "refresh_cache-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.405892] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.586718] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241769, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.652820] env[69994]: DEBUG oslo_concurrency.lockutils [req-f3295e8f-b04e-48ce-af98-9e7df3c28dc7 req-029db620-c67d-4085-8f28-49f6a5224080 service nova] Releasing lock "refresh_cache-15d17772-ac57-49a3-b261-bf49b902f658" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.719123] env[69994]: INFO nova.compute.manager [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] instance snapshotting [ 806.719123] env[69994]: DEBUG nova.objects.instance [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'flavor' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.744413] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.744897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.745355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.745778] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.746188] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.751676] env[69994]: INFO nova.compute.manager [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Terminating instance [ 806.795793] env[69994]: DEBUG nova.network.neutron [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.956439] env[69994]: DEBUG nova.network.neutron [-] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.957499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d66c1b-0225-4dd2-9b10-1b6606173f4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.967978] env[69994]: DEBUG nova.network.neutron [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Updating instance_info_cache with network_info: [{"id": "ef23e47f-0bc3-4254-bf4a-51e407cd43b7", "address": "fa:16:3e:88:63:bd", "network": {"id": "e385cc80-bdb6-4ad3-9398-23d0bd27a6c1", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1295849363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b37c974d2c01403a8e31985dd3e50367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef23e47f-0b", "ovs_interfaceid": "ef23e47f-0bc3-4254-bf4a-51e407cd43b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.972017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a53faa5-a3f8-4967-bdd5-c03cb41ad8f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.018836] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e99efde-4ea0-405c-8f6b-687e3a645ab3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.028098] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dfeefe-5c84-40d5-88d7-4c275ee215ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.045323] env[69994]: DEBUG nova.compute.provider_tree [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.082548] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241769, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.224640] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3723cfa-cc4a-4edc-bd6e-777f279909df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.249060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5700af04-c835-44f3-bb3e-3e091acc4d8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.267311] env[69994]: DEBUG nova.compute.manager [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 807.267631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.268644] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8caec06-109d-4dcb-9874-9b722fb5565f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.278117] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.278515] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee268ad3-100a-4277-8094-c999ca190724 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.289435] env[69994]: DEBUG oslo_vmware.api [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 807.289435] env[69994]: value = "task-3241770" [ 807.289435] env[69994]: _type = "Task" [ 807.289435] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.302069] env[69994]: DEBUG oslo_vmware.api [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241770, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.462182] env[69994]: INFO nova.compute.manager [-] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Took 1.49 seconds to deallocate network for instance. 
[ 807.476069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Releasing lock "refresh_cache-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.476069] env[69994]: DEBUG nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Instance network_info: |[{"id": "ef23e47f-0bc3-4254-bf4a-51e407cd43b7", "address": "fa:16:3e:88:63:bd", "network": {"id": "e385cc80-bdb6-4ad3-9398-23d0bd27a6c1", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1295849363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b37c974d2c01403a8e31985dd3e50367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef23e47f-0b", "ovs_interfaceid": "ef23e47f-0bc3-4254-bf4a-51e407cd43b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 807.476420] env[69994]: DEBUG oslo_concurrency.lockutils [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] Acquired lock "refresh_cache-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.476747] env[69994]: DEBUG nova.network.neutron [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Refreshing network info cache for port ef23e47f-0bc3-4254-bf4a-51e407cd43b7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.477757] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:63:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef23e47f-0bc3-4254-bf4a-51e407cd43b7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.486703] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Creating folder: Project (b37c974d2c01403a8e31985dd3e50367). 
Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.491530] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a25aada1-14b4-4d93-a9f5-29b2c1796a37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.506338] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Created folder: Project (b37c974d2c01403a8e31985dd3e50367) in parent group-v647729. [ 807.506600] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Creating folder: Instances. Parent ref: group-v647878. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.506758] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bbe40ca-67c9-4eff-bf85-e431de400a03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.519753] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Created folder: Instances in parent group-v647878. [ 807.520228] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.520228] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.520429] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce0a2c95-b2c4-4b4b-9a3c-590a50fdf72b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.545842] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.545842] env[69994]: value = "task-3241773" [ 807.545842] env[69994]: _type = "Task" [ 807.545842] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.549937] env[69994]: DEBUG nova.scheduler.client.report [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.561013] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241773, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.585286] env[69994]: DEBUG oslo_vmware.api [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3241769, 'name': PowerOnVM_Task, 'duration_secs': 1.158046} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.585286] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.585286] env[69994]: INFO nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Took 7.67 seconds to spawn the instance on the hypervisor. [ 807.585286] env[69994]: DEBUG nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.585286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9efd9f9-6d92-496e-8479-c9c3c97ff25c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.761229] env[69994]: DEBUG nova.network.neutron [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Updated VIF entry in instance network info cache for port ef23e47f-0bc3-4254-bf4a-51e407cd43b7. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 807.761229] env[69994]: DEBUG nova.network.neutron [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Updating instance_info_cache with network_info: [{"id": "ef23e47f-0bc3-4254-bf4a-51e407cd43b7", "address": "fa:16:3e:88:63:bd", "network": {"id": "e385cc80-bdb6-4ad3-9398-23d0bd27a6c1", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1295849363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b37c974d2c01403a8e31985dd3e50367", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef23e47f-0b", "ovs_interfaceid": "ef23e47f-0bc3-4254-bf4a-51e407cd43b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.762678] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 807.763636] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b1d35af7-61b5-4ade-a83d-9657565689af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.777063] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 807.777063] env[69994]: value = "task-3241774" [ 807.777063] env[69994]: _type = "Task" [ 807.777063] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.788783] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241774, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.801895] env[69994]: DEBUG oslo_vmware.api [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241770, 'name': PowerOffVM_Task, 'duration_secs': 0.208975} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.802693] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.803086] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 807.803494] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93d60400-3b03-4507-8efc-b33887328943 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.923456] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.923824] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.924212] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Deleting the datastore file [datastore2] e4013007-fd79-4d70-a9d1-70a4c621c0ea {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.924832] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2966ed8e-06e1-4434-8166-e0455be6011f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.933886] env[69994]: DEBUG oslo_vmware.api [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for the task: (returnval){ [ 807.933886] env[69994]: value = "task-3241776" [ 807.933886] env[69994]: _type = "Task" [ 807.933886] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.946434] env[69994]: DEBUG oslo_vmware.api [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241776, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.972332] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.055779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.056635] env[69994]: DEBUG nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 808.069028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.733s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.071200] env[69994]: INFO nova.compute.claims [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.075649] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241773, 'name': CreateVM_Task, 'duration_secs': 0.389821} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.076213] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 808.077164] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.077413] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.077860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 808.078492] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d531d024-0ac4-42c4-8ad5-283f31519212 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.084804] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 808.084804] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522b2557-f10b-e4ed-ffef-ba9692bf7af6" [ 808.084804] env[69994]: _type = "Task" [ 808.084804] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.095793] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b2557-f10b-e4ed-ffef-ba9692bf7af6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.111142] env[69994]: INFO nova.compute.manager [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Took 45.66 seconds to build instance. 
[ 808.264341] env[69994]: DEBUG oslo_concurrency.lockutils [req-8fea2742-46b9-47e9-b105-cda251686149 req-39a7b9d1-a5b9-4ae9-afe6-4bdfc1630a13 service nova] Releasing lock "refresh_cache-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.287883] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241774, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.292609] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 808.292841] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647877', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'name': 'volume-7e4a5305-889c-4f6f-ae22-6686decd4bae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f66a148-86fe-4ddc-b8ed-6e6a306bbc24', 'attached_at': '', 'detached_at': '', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'serial': '7e4a5305-889c-4f6f-ae22-6686decd4bae'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 808.293784] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b608da7-febe-49ec-8cfb-d02cd7dbfedc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.311484] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7924b3a6-a0bd-4d82-a22b-1a6e49ce079d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.339294] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] volume-7e4a5305-889c-4f6f-ae22-6686decd4bae/volume-7e4a5305-889c-4f6f-ae22-6686decd4bae.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 808.339633] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5463d427-c16e-4ed9-a9f2-1cfee1f4b87a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.361535] env[69994]: DEBUG oslo_vmware.api [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 808.361535] env[69994]: value = 
"task-3241777" [ 808.361535] env[69994]: _type = "Task" [ 808.361535] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.375114] env[69994]: DEBUG oslo_vmware.api [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241777, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.445491] env[69994]: DEBUG oslo_vmware.api [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Task: {'id': task-3241776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165877} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.445491] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 808.445491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 808.445491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.445491] env[69994]: INFO nova.compute.manager [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Took 1.18 seconds to destroy the instance on the hypervisor. [ 808.445838] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.445900] env[69994]: DEBUG nova.compute.manager [-] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 808.445974] env[69994]: DEBUG nova.network.neutron [-] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.579281] env[69994]: DEBUG nova.compute.utils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 808.586469] env[69994]: DEBUG nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 808.586469] env[69994]: DEBUG nova.network.neutron [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 808.603492] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b2557-f10b-e4ed-ffef-ba9692bf7af6, 'name': SearchDatastore_Task, 'duration_secs': 0.015454} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.604421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.605435] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.605706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.605858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.606050] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.606552] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c28a523b-9d9b-43db-afad-771c6f86697a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.612789] env[69994]: DEBUG oslo_concurrency.lockutils [None req-292982c2-265c-4da3-a579-5a9f23c4b9fa tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.476s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.622853] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.622853] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 808.622853] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f5fb85-4273-4db1-81bf-300e592e0251 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.633786] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 808.633786] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527b8c39-892c-8320-a03e-f14f93dc21f6" [ 808.633786] env[69994]: _type = "Task" [ 808.633786] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.654294] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527b8c39-892c-8320-a03e-f14f93dc21f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.654684] env[69994]: DEBUG nova.policy [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42976cbf12d645ee8bbedf58c7d07603', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16b66dfea80140689fa05c54842cdf96', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 808.737454] env[69994]: DEBUG nova.compute.manager [req-7327fd3a-d5c4-4228-9655-3ddf84dbc0bb req-37db070b-18f6-4d0a-9167-7bd88596d3c1 service nova] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Received event network-vif-deleted-239a38e8-39e8-487b-ba99-cf85c99d41f1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 808.788203] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241774, 'name': CreateSnapshot_Task, 'duration_secs': 0.781482} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.788578] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 808.789412] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7d4ea9-ecd7-43e3-9b4d-7863e686f3cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.874583] env[69994]: DEBUG oslo_vmware.api [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241777, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.084480] env[69994]: DEBUG nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 809.098333] env[69994]: INFO nova.compute.manager [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Rescuing [ 809.098333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.098333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.098333] env[69994]: DEBUG nova.network.neutron [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.115213] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 809.155762] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527b8c39-892c-8320-a03e-f14f93dc21f6, 'name': SearchDatastore_Task, 'duration_secs': 0.020072} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.156752] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e9b39f1-d441-40ff-b24d-ff2af330c0cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.164264] env[69994]: DEBUG nova.network.neutron [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Successfully created port: 7480edff-8738-4f3c-9cd6-e7d4036d475e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 809.168429] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 809.168429] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520c38a4-97f6-cc8d-c1c1-9b70d1526e95" [ 809.168429] env[69994]: _type = "Task" [ 809.168429] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.179299] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520c38a4-97f6-cc8d-c1c1-9b70d1526e95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.243873] env[69994]: DEBUG nova.network.neutron [-] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.317632] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 809.323781] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9856c7ee-b9aa-415d-908d-3268bcf8350f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.336917] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 809.336917] env[69994]: value = "task-3241778" [ 809.336917] env[69994]: _type = "Task" [ 809.336917] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.347735] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241778, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.374981] env[69994]: DEBUG oslo_vmware.api [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241777, 'name': ReconfigVM_Task, 'duration_secs': 0.89586} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.378026] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Reconfigured VM instance instance-00000019 to attach disk [datastore2] volume-7e4a5305-889c-4f6f-ae22-6686decd4bae/volume-7e4a5305-889c-4f6f-ae22-6686decd4bae.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 809.384229] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b6649c4-1c99-4400-a7dc-ce69c57ae762 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.404206] env[69994]: DEBUG oslo_vmware.api [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 809.404206] env[69994]: value = "task-3241779" [ 809.404206] env[69994]: _type = "Task" [ 809.404206] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.415518] env[69994]: DEBUG oslo_vmware.api [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241779, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.647558] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.683356] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520c38a4-97f6-cc8d-c1c1-9b70d1526e95, 'name': SearchDatastore_Task, 'duration_secs': 0.035448} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.686160] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.686498] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df/df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 809.686841] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce26d5b5-0528-44ba-996a-021e05b8797a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.695763] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 809.695763] env[69994]: value = "task-3241780" [ 809.695763] env[69994]: _type = "Task" [ 809.695763] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.707292] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241780, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.748194] env[69994]: INFO nova.compute.manager [-] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Took 1.30 seconds to deallocate network for instance. 
[ 809.807666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7097461-226f-4702-bb14-7b3cbaaa5d8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.818599] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53999abb-40c5-4053-b9d5-37e727e8c6de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.864415] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efb9350-7712-4b11-95d8-7ec7e6d933db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.878907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a9a832-292e-4217-b3a0-efa60fa45565 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.883635] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241778, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.895877] env[69994]: DEBUG nova.compute.provider_tree [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.913561] env[69994]: DEBUG nova.network.neutron [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Updating instance_info_cache with network_info: [{"id": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "address": "fa:16:3e:1e:ef:15", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde3e77dc-77", "ovs_interfaceid": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.923714] env[69994]: DEBUG oslo_vmware.api [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c 
tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241779, 'name': ReconfigVM_Task, 'duration_secs': 0.184176} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.924101] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647877', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'name': 'volume-7e4a5305-889c-4f6f-ae22-6686decd4bae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f66a148-86fe-4ddc-b8ed-6e6a306bbc24', 'attached_at': '', 'detached_at': '', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'serial': '7e4a5305-889c-4f6f-ae22-6686decd4bae'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 810.096792] env[69994]: DEBUG nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 810.139197] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 810.139197] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.139342] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 810.139519] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Flavor pref 0:0:0 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.139662] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 810.139804] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 810.140022] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 810.140191] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 810.140363] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 810.140532] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 810.140701] env[69994]: DEBUG nova.virt.hardware [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 810.147021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd448f8d-095a-4f34-8845-51a9118176d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.151529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a21b633-2083-40a1-ae11-5050f2882bc8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.207464] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241780, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.255138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.368673] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241778, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.400278] env[69994]: DEBUG nova.scheduler.client.report [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 810.415733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.708268] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241780, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629773} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.708589] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df/df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.709103] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.709378] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6de86943-6456-4526-a3c6-4a0f95b57f45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.719104] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 810.719104] env[69994]: value = "task-3241781" [ 810.719104] env[69994]: _type = "Task" [ 810.719104] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.729681] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241781, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.867323] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241778, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.904724] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.905018] env[69994]: DEBUG nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 810.907842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.760s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.907842] env[69994]: DEBUG nova.objects.instance [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lazy-loading 'resources' on Instance uuid 558ee84a-731b-4cb1-967d-cf84c8d39718 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.915799] env[69994]: DEBUG nova.compute.manager [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Received event network-vif-deleted-150df766-08da-4092-b1e4-2e9c7a7cbf76 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.916159] env[69994]: DEBUG nova.compute.manager [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Received event network-changed-f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.916299] env[69994]: DEBUG nova.compute.manager [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Refreshing instance network info cache due to event network-changed-f7d5e758-a993-4a15-8bba-a695f99a96f4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 810.916509] env[69994]: DEBUG oslo_concurrency.lockutils [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.918018] env[69994]: DEBUG oslo_concurrency.lockutils [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.918018] env[69994]: DEBUG nova.network.neutron [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Refreshing network info cache for port f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.961377] env[69994]: DEBUG nova.objects.instance [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 810.972158] env[69994]: DEBUG nova.network.neutron [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Successfully updated port: 7480edff-8738-4f3c-9cd6-e7d4036d475e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.006180] env[69994]: DEBUG nova.compute.manager [req-8321182d-804f-4cff-b5d2-0c48392f347a req-26af643f-aece-4faa-b4a0-2117b9be6e80 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Received event network-vif-plugged-7480edff-8738-4f3c-9cd6-e7d4036d475e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 811.006453] env[69994]: DEBUG oslo_concurrency.lockutils [req-8321182d-804f-4cff-b5d2-0c48392f347a req-26af643f-aece-4faa-b4a0-2117b9be6e80 service nova] Acquiring lock "dca638aa-c491-431f-a0e5-d02bd76705ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.006687] env[69994]: DEBUG oslo_concurrency.lockutils [req-8321182d-804f-4cff-b5d2-0c48392f347a req-26af643f-aece-4faa-b4a0-2117b9be6e80 service nova] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.007093] env[69994]: DEBUG oslo_concurrency.lockutils [req-8321182d-804f-4cff-b5d2-0c48392f347a req-26af643f-aece-4faa-b4a0-2117b9be6e80 service nova] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.007614] env[69994]: DEBUG nova.compute.manager [req-8321182d-804f-4cff-b5d2-0c48392f347a 
req-26af643f-aece-4faa-b4a0-2117b9be6e80 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] No waiting events found dispatching network-vif-plugged-7480edff-8738-4f3c-9cd6-e7d4036d475e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 811.007902] env[69994]: WARNING nova.compute.manager [req-8321182d-804f-4cff-b5d2-0c48392f347a req-26af643f-aece-4faa-b4a0-2117b9be6e80 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Received unexpected event network-vif-plugged-7480edff-8738-4f3c-9cd6-e7d4036d475e for instance with vm_state building and task_state spawning. [ 811.230586] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241781, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153122} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.231027] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.231855] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f883b8a-54d1-4070-be4b-6b14943ffdcf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.260767] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df/df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.261138] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dc5dc1e-713f-463c-a17d-05d8f2310652 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.298021] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 811.298021] env[69994]: value = "task-3241782" [ 811.298021] env[69994]: _type = "Task" [ 811.298021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.305153] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241782, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.370894] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241778, 'name': CloneVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.410818] env[69994]: DEBUG nova.compute.utils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 811.415572] env[69994]: DEBUG nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 811.415766] env[69994]: DEBUG nova.network.neutron [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 811.467184] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b9a3a91-1f80-4136-9427-c99b64ce900c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.297s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.471701] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "refresh_cache-dca638aa-c491-431f-a0e5-d02bd76705ad" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.471848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "refresh_cache-dca638aa-c491-431f-a0e5-d02bd76705ad" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.472000] env[69994]: DEBUG nova.network.neutron [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.531632] env[69994]: DEBUG nova.policy [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a50297ffebb845cdb950de24f60cb55a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 
'project_id': 'f035f8fbac46483fb4d70f166df319b6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 811.764716] env[69994]: DEBUG nova.network.neutron [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updated VIF entry in instance network info cache for port f7d5e758-a993-4a15-8bba-a695f99a96f4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.764716] env[69994]: DEBUG nova.network.neutron [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.810330] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241782, 'name': ReconfigVM_Task, 'duration_secs': 0.348691} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.814052] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Reconfigured VM instance instance-00000031 to attach disk [datastore2] df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df/df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.815692] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0ccac9b-51d4-40e4-9dda-3a3274f26fe7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.823505] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.824287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.824287] env[69994]: DEBUG nova.compute.manager [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 811.825803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5edc1f-8f5a-44fa-b2aa-e0f65145c95f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.830460] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 811.830460] env[69994]: value = "task-3241783" [ 811.830460] env[69994]: _type = "Task" [ 811.830460] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.838226] env[69994]: DEBUG nova.compute.manager [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 811.838831] env[69994]: DEBUG nova.objects.instance [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.854063] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241783, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.869056] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241778, 'name': CloneVM_Task, 'duration_secs': 2.054791} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.869456] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Created linked-clone VM from snapshot [ 811.870697] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497807a1-ce90-46f1-8caf-99dcff119434 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.880881] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Uploading image 87980241-04ac-4f68-8c15-4aa5e319a6ef {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 811.909416] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 811.909416] env[69994]: value = "vm-647882" [ 811.909416] env[69994]: _type = "VirtualMachine" [ 811.909416] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 811.910246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.910246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.910489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.910489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.910620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.912159] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4e9956af-4bc4-4c82-b4cd-6ef733f65cb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.914081] env[69994]: INFO nova.compute.manager [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Terminating instance [ 811.917852] env[69994]: DEBUG nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 811.928907] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease: (returnval){ [ 811.928907] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf58fc-d66b-1742-17ba-7c1ca22c3244" [ 811.928907] env[69994]: _type = "HttpNfcLease" [ 811.928907] env[69994]: } obtained for exporting VM: (result){ [ 811.928907] env[69994]: value = "vm-647882" [ 811.928907] env[69994]: _type = "VirtualMachine" [ 811.928907] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 811.929189] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the lease: (returnval){ [ 811.929189] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf58fc-d66b-1742-17ba-7c1ca22c3244" [ 811.929189] env[69994]: _type = "HttpNfcLease" [ 811.929189] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 811.938296] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 811.938296] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf58fc-d66b-1742-17ba-7c1ca22c3244" [ 811.938296] env[69994]: _type = "HttpNfcLease" [ 811.938296] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 811.953965] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 811.954620] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a75fbb72-da8e-4a34-8c78-52290a8ae70c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.962519] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 811.962519] env[69994]: value = "task-3241785" [ 811.962519] env[69994]: _type = "Task" [ 811.962519] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.979281] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241785, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.021380] env[69994]: DEBUG nova.network.neutron [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.067235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd4e5fc-2f56-428d-8984-3b5fa47f4d56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.079749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489dc3f2-e198-4065-aaf6-e480fc8f89f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.120865] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f60506-f5c9-4c4d-9bf4-c3888d798db4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.130792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cc6daf-134d-4703-8339-6d0f13f7a687 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.149147] env[69994]: DEBUG nova.compute.provider_tree [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.151586] env[69994]: DEBUG nova.network.neutron [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Successfully created port: e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.233956] env[69994]: DEBUG nova.network.neutron [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Updating instance_info_cache with network_info: [{"id": "7480edff-8738-4f3c-9cd6-e7d4036d475e", "address": "fa:16:3e:bc:8f:60", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7480edff-87", "ovs_interfaceid": "7480edff-8738-4f3c-9cd6-e7d4036d475e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
812.270338] env[69994]: DEBUG oslo_concurrency.lockutils [req-bd205081-32b3-4c6d-9561-69fc7a7a8051 req-e82b0600-e44c-44af-b973-d791b337051d service nova] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.347952] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241783, 'name': Rename_Task, 'duration_secs': 0.163386} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.350921] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 812.350921] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb54d550-3498-43fb-a9ad-bb23ee980a5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.360573] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 812.360573] env[69994]: value = "task-3241786" [ 812.360573] env[69994]: _type = "Task" [ 812.360573] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.372760] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241786, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.427110] env[69994]: DEBUG nova.compute.manager [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.427731] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.428881] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321c27e3-9dfb-4619-a3a1-1f875998dcf6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.442881] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 812.442881] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf58fc-d66b-1742-17ba-7c1ca22c3244" [ 812.442881] env[69994]: _type = "HttpNfcLease" [ 812.442881] env[69994]: } is ready. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 812.443300] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.443494] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 812.443494] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf58fc-d66b-1742-17ba-7c1ca22c3244" [ 812.443494] env[69994]: _type = "HttpNfcLease" [ 812.443494] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 812.443703] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b592147-5716-418b-b5c8-ccfd70a135e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.446251] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e43553-08de-464b-b83b-092f493abed2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.454990] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526112ef-0f27-c6fd-9f2d-268b23adc32f/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 812.455237] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526112ef-0f27-c6fd-9f2d-268b23adc32f/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 812.460033] env[69994]: DEBUG oslo_vmware.api [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 812.460033] env[69994]: value = "task-3241787" [ 812.460033] env[69994]: _type = "Task" [ 812.460033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.527714] env[69994]: DEBUG oslo_vmware.api [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241787, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.536695] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241785, 'name': PowerOffVM_Task, 'duration_secs': 0.206085} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.536992] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 812.537913] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9651b587-4794-4dc9-8e84-a43ade4d05d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.564757] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a7a285-e22d-4fe5-a883-8d8b023181fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.568430] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e9fc8418-13f3-4736-a74c-5e34146af967 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.603697] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.605237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca264c02-b0fe-45e5-901e-84710fc35829 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.615683] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 812.615683] env[69994]: value = "task-3241788" [ 812.615683] env[69994]: _type = "Task" [ 812.615683] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.637133] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 812.637390] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 812.637733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.637922] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.638132] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.638423] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d85579f-2c32-4d0d-afd3-d8e1ae0fdb0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.656141] env[69994]: DEBUG nova.scheduler.client.report [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 812.664040] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.664040] 
env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 812.664040] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37eaeb07-3c12-4ac6-adc8-12ccdb870654 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.671516] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 812.671516] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526c8a40-784c-844e-85af-3839ff1cccc6" [ 812.671516] env[69994]: _type = "Task" [ 812.671516] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.682558] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c8a40-784c-844e-85af-3839ff1cccc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.738896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "refresh_cache-dca638aa-c491-431f-a0e5-d02bd76705ad" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.738896] env[69994]: DEBUG nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Instance network_info: |[{"id": "7480edff-8738-4f3c-9cd6-e7d4036d475e", "address": "fa:16:3e:bc:8f:60", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7480edff-87", "ovs_interfaceid": "7480edff-8738-4f3c-9cd6-e7d4036d475e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 812.738896] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:8f:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7a73c01-1bb9-4612-a1a7-16d71b732e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7480edff-8738-4f3c-9cd6-e7d4036d475e', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.746725] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.747360] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 812.747622] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96ac46cc-3139-4482-a1b7-40e0a42568b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.771538] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 812.771538] env[69994]: value = "task-3241789" [ 812.771538] env[69994]: _type = "Task" [ 812.771538] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.784216] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241789, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.851678] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.852943] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10cb25cc-9c5f-4457-a8e0-091468ac386e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.863170] env[69994]: DEBUG oslo_vmware.api [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 812.863170] env[69994]: value = "task-3241790" [ 812.863170] env[69994]: _type = "Task" [ 812.863170] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.880820] env[69994]: DEBUG oslo_vmware.api [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241790, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.884627] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241786, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.931117] env[69994]: DEBUG nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 812.959198] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 812.959592] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.959793] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 812.960180] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.960450] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 812.960658] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 812.961257] 
env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 812.961257] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 812.961257] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 812.962719] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 812.962719] env[69994]: DEBUG nova.virt.hardware [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 812.963999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50ef3b5-2553-4ed7-b56b-edbf7a8212ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.981616] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b252064-ed07-49c7-9ae3-9eb27fdfea34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.986735] env[69994]: DEBUG oslo_vmware.api [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241787, 'name': PowerOffVM_Task, 'duration_secs': 0.29146} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.987197] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 812.987472] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 812.988285] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afafe5ff-e03e-4339-9d65-b713afe9394c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.077702] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.077702] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.077917] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleting the datastore file [datastore1] d4f87534-813e-4ff6-8b1f-ee23cb0b8e80 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.078319] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f02032a-7ecd-4a5f-83fd-35484d002a75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.088124] env[69994]: DEBUG oslo_vmware.api [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 813.088124] env[69994]: value = "task-3241792" [ 813.088124] env[69994]: _type = "Task" [ 813.088124] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.110050] env[69994]: DEBUG oslo_vmware.api [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241792, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.131685] env[69994]: DEBUG nova.compute.manager [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Received event network-changed-7480edff-8738-4f3c-9cd6-e7d4036d475e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 813.133214] env[69994]: DEBUG nova.compute.manager [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Refreshing instance network info cache due to event network-changed-7480edff-8738-4f3c-9cd6-e7d4036d475e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 813.137551] env[69994]: DEBUG oslo_concurrency.lockutils [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] Acquiring lock "refresh_cache-dca638aa-c491-431f-a0e5-d02bd76705ad" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.137901] env[69994]: DEBUG oslo_concurrency.lockutils [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] Acquired lock "refresh_cache-dca638aa-c491-431f-a0e5-d02bd76705ad" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.138146] env[69994]: DEBUG nova.network.neutron [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Refreshing network info cache for port 7480edff-8738-4f3c-9cd6-e7d4036d475e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 813.164611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.257s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.169248] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.609s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.171501] env[69994]: INFO nova.compute.claims [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.191359] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c8a40-784c-844e-85af-3839ff1cccc6, 'name': SearchDatastore_Task, 'duration_secs': 0.012604} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.192721] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b96569e3-db29-408b-8832-56afcbd4284f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.203124] env[69994]: INFO nova.scheduler.client.report [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Deleted allocations for instance 558ee84a-731b-4cb1-967d-cf84c8d39718 [ 813.207209] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 813.207209] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e54fca-455d-3374-16bb-9d082eaa2273" [ 813.207209] env[69994]: _type = "Task" [ 813.207209] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.228660] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e54fca-455d-3374-16bb-9d082eaa2273, 'name': SearchDatastore_Task, 'duration_secs': 0.013706} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.229367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.229826] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 813.231856] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b13da83f-60d7-44c4-a2c5-a56ed972e88d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.243717] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 813.243717] env[69994]: value = "task-3241793" [ 813.243717] env[69994]: _type = "Task" [ 813.243717] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.255081] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241793, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.285729] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241789, 'name': CreateVM_Task, 'duration_secs': 0.422642} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.285985] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 813.286877] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.287112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.288259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 813.288561] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fddcddc-73f6-4d53-92a5-1fa8ef4f3ada {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.294597] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 813.294597] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525a7e41-21bd-5100-56f4-e82ba31875ca" [ 813.294597] env[69994]: _type = "Task" [ 813.294597] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.304673] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525a7e41-21bd-5100-56f4-e82ba31875ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.379171] env[69994]: DEBUG oslo_vmware.api [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241786, 'name': PowerOnVM_Task, 'duration_secs': 0.603858} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.380617] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.384199] env[69994]: INFO nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Took 8.37 seconds to spawn the instance on the hypervisor. [ 813.384199] env[69994]: DEBUG nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.384199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773404a1-e77e-4a38-a121-4b6d49f72a11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.390569] env[69994]: DEBUG oslo_vmware.api [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241790, 'name': PowerOffVM_Task, 'duration_secs': 0.449981} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.391593] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.392049] env[69994]: DEBUG nova.compute.manager [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.393045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a94cb4a-f893-4b11-8786-8f4866a72b04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.601068] env[69994]: DEBUG oslo_vmware.api [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235804} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.602149] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.602149] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 813.602149] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.602149] env[69994]: INFO nova.compute.manager [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Took 1.17 seconds to destroy the instance on the hypervisor. [ 813.603719] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 813.603719] env[69994]: DEBUG nova.compute.manager [-] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 813.603719] env[69994]: DEBUG nova.network.neutron [-] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 813.724745] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1a1722d4-fc75-4439-be98-c597bf65ea85 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "558ee84a-731b-4cb1-967d-cf84c8d39718" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.072s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.757397] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241793, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.812578] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525a7e41-21bd-5100-56f4-e82ba31875ca, 'name': SearchDatastore_Task, 'duration_secs': 0.015575} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.813728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.813728] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 813.814985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.815656] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.815656] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 813.815796] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9569e97c-15f6-466a-91a2-299fdaf64d03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.830799] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 813.831294] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 813.835934] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bded0aaf-3b26-437b-bba8-c87db17bf57d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.850758] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 813.850758] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cd5a25-1e20-e42a-d22d-a2cc315cae9b" [ 813.850758] env[69994]: _type = "Task" [ 813.850758] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.862113] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cd5a25-1e20-e42a-d22d-a2cc315cae9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.916123] env[69994]: INFO nova.compute.manager [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Took 48.58 seconds to build instance. [ 813.917543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f44c8274-deb8-43c0-980e-46468f5df855 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.093s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.048308] env[69994]: DEBUG nova.network.neutron [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Updated VIF entry in instance network info cache for port 7480edff-8738-4f3c-9cd6-e7d4036d475e. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 814.048657] env[69994]: DEBUG nova.network.neutron [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Updating instance_info_cache with network_info: [{"id": "7480edff-8738-4f3c-9cd6-e7d4036d475e", "address": "fa:16:3e:bc:8f:60", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7480edff-87", "ovs_interfaceid": "7480edff-8738-4f3c-9cd6-e7d4036d475e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.053634] env[69994]: DEBUG nova.network.neutron [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Successfully updated port: e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 814.166604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.166975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.167280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.167599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 
tempest-ServerRescueTestJSON-2013304110-project-member] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.167780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.170686] env[69994]: INFO nova.compute.manager [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Terminating instance [ 814.254666] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675924} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.255044] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. [ 814.255888] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c02a50-b5ca-4c7b-b049-41effed65439 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.286936] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 814.290046] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffeda0f6-1471-4d54-8a5d-cc46e1cf2865 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.313759] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 814.313759] env[69994]: value = "task-3241794" [ 814.313759] env[69994]: _type = "Task" [ 814.313759] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.326207] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241794, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.362376] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cd5a25-1e20-e42a-d22d-a2cc315cae9b, 'name': SearchDatastore_Task, 'duration_secs': 0.020065} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.363460] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95946aee-3a2b-4ee0-a51c-5ea637a45ed9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.372768] env[69994]: DEBUG nova.compute.manager [req-eccf8d77-99e5-4017-97e4-14d550811cf5 req-e260889b-245c-459e-b150-eb586d5b2126 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Received event network-vif-deleted-9cc26e46-d3c4-47b8-bc39-207d9e40b10d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.372978] env[69994]: INFO nova.compute.manager [req-eccf8d77-99e5-4017-97e4-14d550811cf5 req-e260889b-245c-459e-b150-eb586d5b2126 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Neutron deleted interface 9cc26e46-d3c4-47b8-bc39-207d9e40b10d; detaching it from the instance and deleting it from the info cache [ 814.376021] env[69994]: DEBUG nova.network.neutron [req-eccf8d77-99e5-4017-97e4-14d550811cf5 req-e260889b-245c-459e-b150-eb586d5b2126 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.376288] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 814.376288] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52828aeb-6daf-c1ab-7d6a-5d8e3fedf481" [ 814.376288] env[69994]: _type = "Task" [ 814.376288] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.390789] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52828aeb-6daf-c1ab-7d6a-5d8e3fedf481, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.419341] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3b79e73-3e31-4830-a047-463284ca8aa7 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 105.307s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.551957] env[69994]: DEBUG oslo_concurrency.lockutils [req-4a5964c8-fa53-4c86-9af0-52450992957a req-912f649e-bce4-4aab-95ef-c607398d8803 service nova] Releasing lock "refresh_cache-dca638aa-c491-431f-a0e5-d02bd76705ad" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.553706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.553843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.553996] env[69994]: DEBUG nova.network.neutron [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.567250] env[69994]: DEBUG nova.network.neutron [-] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.679181] env[69994]: DEBUG nova.compute.manager [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 814.679181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.679181] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ee5c64-f749-47ba-bbcc-70fc0c0d37ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.693821] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.697591] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-743c1f66-84cb-4cf9-9e43-79b319a97f14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.707395] env[69994]: DEBUG oslo_vmware.api [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 814.707395] env[69994]: value = "task-3241795" [ 814.707395] env[69994]: _type = "Task" [ 814.707395] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.726545] env[69994]: DEBUG oslo_vmware.api [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.787495] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f43180c-2b35-44fc-a287-9c0602f9698c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.798199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e169c50c-5df8-4016-998c-1e9cf92b6b93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.837310] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e91cdf-5aa0-4328-9463-d50824500275 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.846557] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241794, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.850652] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6876fa-fd51-4e5b-80e5-baf0411cd7c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.869666] env[69994]: DEBUG nova.compute.provider_tree [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.877707] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8e985c9-f346-4bf4-a045-81cdaf97362a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.891609] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52828aeb-6daf-c1ab-7d6a-5d8e3fedf481, 'name': SearchDatastore_Task, 'duration_secs': 0.014018} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.893109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.893406] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] dca638aa-c491-431f-a0e5-d02bd76705ad/dca638aa-c491-431f-a0e5-d02bd76705ad.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 814.894207] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-441234b7-d6b0-4d4d-ac57-0cf07efbd646 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.898593] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d999ef-1364-497c-ac20-47b56a4dd87e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.918999] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 814.918999] env[69994]: value = "task-3241796" [ 814.918999] env[69994]: _type = "Task" [ 814.918999] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.922655] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 814.940850] env[69994]: DEBUG nova.compute.manager [req-eccf8d77-99e5-4017-97e4-14d550811cf5 req-e260889b-245c-459e-b150-eb586d5b2126 service nova] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Detach interface failed, port_id=9cc26e46-d3c4-47b8-bc39-207d9e40b10d, reason: Instance d4f87534-813e-4ff6-8b1f-ee23cb0b8e80 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 814.945813] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241796, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.979754] env[69994]: DEBUG nova.objects.instance [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.071897] env[69994]: INFO nova.compute.manager [-] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Took 1.47 seconds to deallocate network for instance. [ 815.087908] env[69994]: DEBUG nova.network.neutron [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.217343] env[69994]: DEBUG oslo_vmware.api [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241795, 'name': PowerOffVM_Task, 'duration_secs': 0.344648} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.217729] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 815.217966] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 815.218170] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62458a82-77bf-4729-942a-45cb5458cb40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.229068] env[69994]: DEBUG nova.network.neutron [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4706905-12", "ovs_interfaceid": "e4706905-12e3-43b1-a83a-409585a96042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.294155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cbcce25b-c459-4e7b-b016-2d86b79ffa1c tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "interface-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.294155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cbcce25b-c459-4e7b-b016-2d86b79ffa1c tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "interface-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.294578] env[69994]: DEBUG nova.objects.instance [None req-cbcce25b-c459-4e7b-b016-2d86b79ffa1c tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lazy-loading 'flavor' on Instance uuid df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.304372] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.304585] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.304843] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Deleting the datastore file [datastore2] c47c26c8-3f7f-436b-95aa-0bd08d41e62b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.305135] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1b95a64-1524-4021-9faa-969ec1114cc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.315181] env[69994]: DEBUG oslo_vmware.api [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for the task: (returnval){ [ 815.315181] env[69994]: value = "task-3241798" [ 815.315181] env[69994]: _type = "Task" [ 815.315181] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.325838] env[69994]: DEBUG oslo_vmware.api [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241798, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.344853] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241794, 'name': ReconfigVM_Task, 'duration_secs': 0.623413} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.345195] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Reconfigured VM instance instance-0000002d to attach disk [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 815.346127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9cc756-46a1-463a-a177-3535ea9a5a7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.376100] env[69994]: DEBUG nova.scheduler.client.report [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.380093] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a25e6c29-ab7b-489b-81c0-7734b1a71e7f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.400894] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 815.400894] env[69994]: value = "task-3241799" [ 815.400894] env[69994]: _type = "Task" [ 815.400894] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.411586] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241799, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.433302] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241796, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.464470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.484919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.485144] env[69994]: DEBUG oslo_concurrency.lockutils [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.485347] env[69994]: DEBUG nova.network.neutron [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.485531] env[69994]: DEBUG nova.objects.instance [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'info_cache' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.579665] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.732520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.733097] env[69994]: DEBUG nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Instance network_info: |[{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4706905-12", "ovs_interfaceid": "e4706905-12e3-43b1-a83a-409585a96042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 815.733484] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:f6:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4706905-12e3-43b1-a83a-409585a96042', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.743188] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating folder: Project (f035f8fbac46483fb4d70f166df319b6). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 815.743593] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee61fb85-327b-40ec-9be6-efad578edf43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.759267] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Created folder: Project (f035f8fbac46483fb4d70f166df319b6) in parent group-v647729. [ 815.759571] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating folder: Instances. Parent ref: group-v647884. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 815.760261] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30239247-99e7-4dca-990e-2b06b9e791c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.773022] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Created folder: Instances in parent group-v647884. [ 815.773338] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 815.773702] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 815.773793] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa41d459-0b8f-449d-b6fa-c2c287c61c75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.799361] env[69994]: DEBUG nova.objects.instance [None req-cbcce25b-c459-4e7b-b016-2d86b79ffa1c tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lazy-loading 'pci_requests' on Instance uuid df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.804148] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.804148] env[69994]: value = "task-3241802" [ 815.804148] env[69994]: _type = "Task" [ 815.804148] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.814082] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241802, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.830646] env[69994]: DEBUG oslo_vmware.api [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Task: {'id': task-3241798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.469248} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.834287] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.834521] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.834738] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.834947] env[69994]: INFO nova.compute.manager [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 815.835225] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 815.835786] env[69994]: DEBUG nova.compute.manager [-] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 815.835886] env[69994]: DEBUG nova.network.neutron [-] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 815.892718] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.893374] env[69994]: DEBUG nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 815.896414] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.734s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.898183] env[69994]: INFO nova.compute.claims [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.914360] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241799, 'name': ReconfigVM_Task, 'duration_secs': 0.241758} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.914360] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 815.914360] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7faeeb4e-3457-424a-bb43-202485489336 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.922994] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 815.922994] env[69994]: value = "task-3241803" [ 815.922994] env[69994]: _type = "Task" [ 815.922994] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.939532] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241796, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732251} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.943942] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] dca638aa-c491-431f-a0e5-d02bd76705ad/dca638aa-c491-431f-a0e5-d02bd76705ad.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 815.943942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.943942] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241803, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.945214] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b761b433-c53e-4a62-bf36-433066695d13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.952693] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 815.952693] env[69994]: value = "task-3241804" [ 815.952693] env[69994]: _type = "Task" [ 815.952693] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.965708] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241804, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.989450] env[69994]: DEBUG nova.objects.base [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Object Instance<7f66a148-86fe-4ddc-b8ed-6e6a306bbc24> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 816.075579] env[69994]: DEBUG nova.compute.manager [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received event network-vif-plugged-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.075743] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.075974] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.076179] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.076400] env[69994]: DEBUG nova.compute.manager [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] No waiting events found dispatching network-vif-plugged-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.076593] 
env[69994]: WARNING nova.compute.manager [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received unexpected event network-vif-plugged-e4706905-12e3-43b1-a83a-409585a96042 for instance with vm_state building and task_state spawning. [ 816.076885] env[69994]: DEBUG nova.compute.manager [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received event network-changed-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.076966] env[69994]: DEBUG nova.compute.manager [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Refreshing instance network info cache due to event network-changed-e4706905-12e3-43b1-a83a-409585a96042. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 816.077135] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] Acquiring lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.077274] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] Acquired lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.077433] env[69994]: DEBUG nova.network.neutron [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Refreshing network info cache for port e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.302496] env[69994]: DEBUG nova.objects.base [None req-cbcce25b-c459-4e7b-b016-2d86b79ffa1c tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Object Instance<df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 816.302496] env[69994]: DEBUG nova.network.neutron [None req-cbcce25b-c459-4e7b-b016-2d86b79ffa1c tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.314396] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241802, 'name': CreateVM_Task, 'duration_secs': 0.440119} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.314593] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 816.315249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.315414] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.315745] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 816.316409] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e64661f-05a5-4012-b9ce-b01feec21535 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.321372] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 816.321372] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527128b1-fffa-4293-8b31-be97b6fa7605" [ 816.321372] env[69994]: _type = "Task" [ 816.321372] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.330251] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527128b1-fffa-4293-8b31-be97b6fa7605, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.391252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cbcce25b-c459-4e7b-b016-2d86b79ffa1c tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "interface-df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.098s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.405951] env[69994]: DEBUG nova.compute.utils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 816.408367] env[69994]: DEBUG nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 816.408367] env[69994]: DEBUG nova.network.neutron [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.438205] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241803, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.452868] env[69994]: DEBUG nova.policy [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a9dcb69ce924c0fb3145b7652f55266', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7422894c60b84a4ba1eb1b4dee4920c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 816.463468] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241804, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076087} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.463747] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.464581] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263f7ffa-bb26-401f-9d85-10acb68d8fc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.488807] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] dca638aa-c491-431f-a0e5-d02bd76705ad/dca638aa-c491-431f-a0e5-d02bd76705ad.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.489030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea072040-865a-4dcf-82de-5483101d4123 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.512661] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 816.512661] env[69994]: value = "task-3241805" [ 816.512661] env[69994]: _type = "Task" [ 816.512661] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.523704] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241805, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.630377] env[69994]: DEBUG nova.network.neutron [-] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.808337] env[69994]: DEBUG nova.network.neutron [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating instance_info_cache with network_info: [{"id": "f2652bdf-bba7-4a73-9045-397e55945ed1", "address": "fa:16:3e:c5:70:d6", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2652bdf-bb", "ovs_interfaceid": "f2652bdf-bba7-4a73-9045-397e55945ed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.840236] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527128b1-fffa-4293-8b31-be97b6fa7605, 'name': SearchDatastore_Task, 'duration_secs': 0.033049} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.840701] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.841063] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.841446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.841652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.842252] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.842334] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-533d610a-0ed3-4340-b6b3-4c6470370e39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.861243] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.861403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.862216] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c66450c-057d-48f9-bdbb-1b7a8f2c94c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.868517] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 816.868517] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5251c164-ec5d-f7cc-21d9-ba1edb1f854e" [ 816.868517] env[69994]: _type = "Task" [ 816.868517] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.881504] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5251c164-ec5d-f7cc-21d9-ba1edb1f854e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.902682] env[69994]: DEBUG nova.network.neutron [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updated VIF entry in instance network info cache for port e4706905-12e3-43b1-a83a-409585a96042. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 816.903087] env[69994]: DEBUG nova.network.neutron [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4706905-12", "ovs_interfaceid": "e4706905-12e3-43b1-a83a-409585a96042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.908257] env[69994]: DEBUG nova.network.neutron [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Successfully created port: 
2027b219-02f1-4669-80e5-0d03b45b5562 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 816.913299] env[69994]: DEBUG nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 816.946308] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241803, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.023770] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241805, 'name': ReconfigVM_Task, 'duration_secs': 0.408021} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.024110] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Reconfigured VM instance instance-00000032 to attach disk [datastore1] dca638aa-c491-431f-a0e5-d02bd76705ad/dca638aa-c491-431f-a0e5-d02bd76705ad.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.024807] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92b8007c-5659-47ac-8c03-1c150ff149fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.035281] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 817.035281] env[69994]: value = "task-3241806" [ 817.035281] env[69994]: _type = "Task" [ 817.035281] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.045736] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241806, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.136209] env[69994]: INFO nova.compute.manager [-] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Took 1.30 seconds to deallocate network for instance. 
[ 817.316709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.382018] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5251c164-ec5d-f7cc-21d9-ba1edb1f854e, 'name': SearchDatastore_Task, 'duration_secs': 0.023495} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.385479] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0593a95d-b360-478a-8be1-689c608664c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.391848] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 817.391848] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cec9c6-4a57-65d6-dd1e-343da56d24de" [ 817.391848] env[69994]: _type = "Task" [ 817.391848] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.406342] env[69994]: DEBUG oslo_concurrency.lockutils [req-3b3e2e82-6448-41c4-997a-a04de493c445 req-13695548-35d9-4148-be0d-ace07b479fd3 service nova] Releasing lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.406703] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cec9c6-4a57-65d6-dd1e-343da56d24de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.444121] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241803, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.462578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6882556-aec5-471a-ba28-22abc83a9ee8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.470960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216cf58d-d317-4033-a81e-7995685ad631 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.505685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5f320c-a863-49f0-8672-1e7675a1caf4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.514231] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a55fc5-b18f-4ee9-92aa-58860d8bf10e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.528643] env[69994]: DEBUG nova.compute.provider_tree [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.542902] env[69994]: DEBUG nova.compute.manager [req-b1969689-120c-42d8-ba68-1800193a9864 req-c2eceb22-0b6f-4a94-999f-dae7033d88fe service nova] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Received event network-vif-deleted-641f1973-439b-47b8-a402-9d7a8557e0c2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 817.547992] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241806, 'name': Rename_Task, 'duration_secs': 0.248994} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.548346] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.548530] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0c34200-bc4b-4fba-a001-7a6b84961198 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.556101] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 817.556101] env[69994]: value = "task-3241807" [ 817.556101] env[69994]: _type = "Task" [ 817.556101] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.564790] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241807, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.648230] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.909017] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cec9c6-4a57-65d6-dd1e-343da56d24de, 'name': SearchDatastore_Task, 'duration_secs': 0.016815} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.909419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.909790] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.910153] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f3d4300-51a5-4dfb-a310-fc64848241ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.919474] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 817.919474] env[69994]: value = "task-3241808" [ 817.919474] env[69994]: _type = "Task" [ 817.919474] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.925833] env[69994]: DEBUG nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 817.936133] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.947643] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241803, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.956883] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 817.957160] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.957320] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 817.957522] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.957683] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 817.957834] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 817.958108] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 817.958286] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 817.958535] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 817.958645] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 817.958832] env[69994]: DEBUG nova.virt.hardware [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 817.959806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b795cfc-4fec-493e-958c-c6b4464647d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.968487] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39b999e-d544-4a1f-873a-928fd74a2a4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.032384] env[69994]: DEBUG nova.scheduler.client.report [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.068710] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241807, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.324964] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.325584] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86eb3b38-f16f-4fbc-a6e4-048ab12c6a6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.335407] env[69994]: DEBUG oslo_vmware.api [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 818.335407] env[69994]: value = "task-3241809" [ 818.335407] env[69994]: _type = "Task" [ 818.335407] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.345366] env[69994]: DEBUG oslo_vmware.api [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241809, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.433025] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.449349] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241803, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.449963] env[69994]: DEBUG nova.network.neutron [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Successfully updated port: 2027b219-02f1-4669-80e5-0d03b45b5562 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.537842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.538600] env[69994]: DEBUG nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.542085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.376s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.543953] env[69994]: INFO nova.compute.claims [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.568267] env[69994]: DEBUG oslo_vmware.api [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241807, 'name': PowerOnVM_Task, 'duration_secs': 0.540371} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.569036] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.569036] env[69994]: INFO nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Took 8.47 seconds to spawn the instance on the hypervisor. 
[ 818.569036] env[69994]: DEBUG nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.570193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352de704-6824-407a-a6d8-8a215b766316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.812992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.812992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.812992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.812992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.812992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.815270] env[69994]: INFO nova.compute.manager [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Terminating instance [ 818.848786] env[69994]: DEBUG oslo_vmware.api [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241809, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.930740] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.944994] env[69994]: DEBUG oslo_vmware.api [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241803, 'name': PowerOnVM_Task, 'duration_secs': 2.572543} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.945286] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.948185] env[69994]: DEBUG nova.compute.manager [None req-81a94752-5534-48b2-bb21-68113b009118 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.949152] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5d808d-9d5a-4c00-87c7-b0ab3596af9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.952330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "refresh_cache-2d812174-d2ad-4fac-8ae5-ffa51d691374" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.952471] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquired lock "refresh_cache-2d812174-d2ad-4fac-8ae5-ffa51d691374" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.952613] env[69994]: DEBUG nova.network.neutron [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.049351] env[69994]: DEBUG nova.compute.utils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.053529] env[69994]: DEBUG nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] 
[instance: 566522b0-7aa7-4552-9be7-035d742ba394] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.053740] env[69994]: DEBUG nova.network.neutron [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.094413] env[69994]: DEBUG nova.policy [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ccfdf4c5e604bb3a5eca0ac5727774c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5acf9a4a9344d4c9c91b75e83cf7a76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.096900] env[69994]: INFO nova.compute.manager [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Took 41.17 seconds to build instance. [ 819.320383] env[69994]: DEBUG nova.compute.manager [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 819.320383] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.321352] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89299d2d-2545-4d76-9d44-a565c5d1afe3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.331125] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.331458] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-017a65d8-e6c3-461d-bc6d-10225c23e409 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.342891] env[69994]: DEBUG oslo_vmware.api [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 819.342891] env[69994]: value = "task-3241810" [ 819.342891] env[69994]: _type = "Task" [ 819.342891] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.351718] env[69994]: DEBUG oslo_vmware.api [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241809, 'name': PowerOnVM_Task, 'duration_secs': 0.549885} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.352587] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.353386] env[69994]: DEBUG nova.compute.manager [None req-96374cf1-e12b-42aa-b6ff-c612bb3a5b48 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.353777] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268caef1-2764-4f47-8122-f665ec789373 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.361326] env[69994]: DEBUG oslo_vmware.api [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241810, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.427533] env[69994]: DEBUG nova.network.neutron [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Successfully created port: b02b6f7a-67ae-46ce-aaa8-77cd472b8714 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.440947] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241808, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.516963] env[69994]: DEBUG nova.network.neutron [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.554463] env[69994]: DEBUG nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 819.599895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be685554-3901-4218-aca0-3e93d89e2275 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.214s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.781140] env[69994]: DEBUG nova.network.neutron [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Updating instance_info_cache with network_info: [{"id": "2027b219-02f1-4669-80e5-0d03b45b5562", "address": "fa:16:3e:f9:32:6f", "network": {"id": "e133aea5-e663-453d-a16f-2e1fd777c247", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1465432369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7422894c60b84a4ba1eb1b4dee4920c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2027b219-02", "ovs_interfaceid": "2027b219-02f1-4669-80e5-0d03b45b5562", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.860239] env[69994]: DEBUG oslo_vmware.api [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241810, 'name': PowerOffVM_Task, 'duration_secs': 0.373439} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.860239] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 819.860239] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 819.860239] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8049c377-e8b9-4a4f-88f2-bec395419b4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.937633] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241808, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.770746} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.938000] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 819.938381] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 819.938802] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36d62384-7447-4193-91a4-74d9b7dece65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.951844] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 819.952130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 819.952325] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Deleting the datastore file [datastore2] df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 819.953864] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83b004a2-e2a8-49b7-b3dc-6c44f85ca21f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.956127] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 819.956127] env[69994]: value = "task-3241812" [ 819.956127] env[69994]: _type = "Task" [ 819.956127] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.965580] env[69994]: DEBUG oslo_vmware.api [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for the task: (returnval){ [ 819.965580] env[69994]: value = "task-3241813" [ 819.965580] env[69994]: _type = "Task" [ 819.965580] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.974264] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241812, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.978718] env[69994]: DEBUG oslo_vmware.api [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241813, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.106300] env[69994]: DEBUG nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 820.133043] env[69994]: DEBUG nova.compute.manager [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Received event network-vif-plugged-2027b219-02f1-4669-80e5-0d03b45b5562 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.133043] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] Acquiring lock "2d812174-d2ad-4fac-8ae5-ffa51d691374-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.133178] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.133252] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.133426] env[69994]: DEBUG nova.compute.manager [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] No waiting events found dispatching network-vif-plugged-2027b219-02f1-4669-80e5-0d03b45b5562 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 820.133599] env[69994]: WARNING nova.compute.manager [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Received unexpected event network-vif-plugged-2027b219-02f1-4669-80e5-0d03b45b5562 for instance with vm_state building and task_state spawning. 
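Note: the PowerOffVM_Task, ExtendVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same wait_for_task pattern: the driver invokes a vSphere call that returns a Task managed object, and oslo.vmware then polls it (the recurring "progress is 0%" / "completed successfully" lines) until it reaches a terminal state. A minimal illustrative sketch of that calling pattern, assuming an already-created oslo_vmware.api.VMwareAPISession named session and a VM managed-object reference vm_ref (the function name and variables are illustrative, not taken from this log, and this is not the actual Nova source):

    # Illustrative sketch only -- mirrors the "Invoking VirtualMachine.PowerOffVM_Task",
    # "Waiting for the task ... to complete" and "completed successfully" sequence
    # traced in the DEBUG entries above.
    from oslo_vmware import api

    def power_off_vm(session: api.VMwareAPISession, vm_ref):
        # invoke_api issues the SOAP request and returns a Task moref,
        # like the request_handler entries logged by oslo_vmware.service.
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # wait_for_task polls the task state (the _poll_task entries) and
        # returns once vCenter reports completion, raising if the task
        # ends in an error state.
        return session.wait_for_task(task)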
[ 820.133758] env[69994]: DEBUG nova.compute.manager [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Received event network-changed-2027b219-02f1-4669-80e5-0d03b45b5562 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.133912] env[69994]: DEBUG nova.compute.manager [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Refreshing instance network info cache due to event network-changed-2027b219-02f1-4669-80e5-0d03b45b5562. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 820.134107] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] Acquiring lock "refresh_cache-2d812174-d2ad-4fac-8ae5-ffa51d691374" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.198699] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592b0450-238c-432d-9074-2dcb257f785b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.208543] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34de008f-2143-426b-808d-917b340561e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.245127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446b0b2a-289b-4565-bb45-9ea26ceeff93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.255587] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f54a71-fe87-4fd7-af03-1d12f120f31e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.271521] env[69994]: DEBUG nova.compute.provider_tree [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.284082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Releasing lock "refresh_cache-2d812174-d2ad-4fac-8ae5-ffa51d691374" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.284412] env[69994]: DEBUG nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Instance network_info: |[{"id": "2027b219-02f1-4669-80e5-0d03b45b5562", "address": "fa:16:3e:f9:32:6f", "network": {"id": "e133aea5-e663-453d-a16f-2e1fd777c247", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1465432369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7422894c60b84a4ba1eb1b4dee4920c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2027b219-02", "ovs_interfaceid": "2027b219-02f1-4669-80e5-0d03b45b5562", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.284734] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] Acquired lock "refresh_cache-2d812174-d2ad-4fac-8ae5-ffa51d691374" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.284920] env[69994]: DEBUG nova.network.neutron [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Refreshing network info cache for port 2027b219-02f1-4669-80e5-0d03b45b5562 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.286108] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:32:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e9fa4744-8702-4973-b911-ee18192a3e4b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2027b219-02f1-4669-80e5-0d03b45b5562', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.294093] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Creating folder: Project (7422894c60b84a4ba1eb1b4dee4920c1). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.294893] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c9537ec-545f-49c6-a446-c045cd9261a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.308758] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Created folder: Project (7422894c60b84a4ba1eb1b4dee4920c1) in parent group-v647729. [ 820.311112] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Creating folder: Instances. 
Parent ref: group-v647887. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.311112] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40215d3b-1027-4ba3-9f98-b194f25f9442 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.321652] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Created folder: Instances in parent group-v647887. [ 820.321995] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.322406] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.322707] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2212878-48fe-47b7-b4dd-f53158c54b0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.347988] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.347988] env[69994]: value = "task-3241816" [ 820.347988] env[69994]: _type = "Task" [ 820.347988] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.357707] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241816, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.467591] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241812, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111647} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.471143] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 820.471977] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b071ca8-bb83-45ce-b9c5-f5c11db72d94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.481318] env[69994]: DEBUG oslo_vmware.api [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Task: {'id': task-3241813, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29033} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.490511] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.490724] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.490937] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.491129] env[69994]: INFO nova.compute.manager [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Took 1.17 seconds to destroy the instance on the hypervisor. [ 820.491370] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.500725] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.502375] env[69994]: DEBUG nova.compute.manager [-] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 820.502520] env[69994]: DEBUG nova.network.neutron [-] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.504777] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d2bf1eb-ff9a-4cda-bbf5-830103d66491 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.525623] env[69994]: DEBUG nova.compute.manager [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 820.525623] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfec0bf-cbde-4f55-85b5-35a8e2977f5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.536067] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 820.536067] env[69994]: value = "task-3241817" [ 820.536067] env[69994]: _type = "Task" [ 820.536067] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.548676] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241817, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.568250] env[69994]: DEBUG nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.600609] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.600868] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.601083] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.601392] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.601520] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.601703] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.603075] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.603075] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.603075] env[69994]: DEBUG nova.virt.hardware [None 
req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.603075] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.603075] env[69994]: DEBUG nova.virt.hardware [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.604261] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339f611f-0ac2-43eb-9604-5a1e7a67d640 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.619670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4a2dd6-0483-4fb9-88e3-1048d42c697e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.659313] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.775317] env[69994]: DEBUG nova.scheduler.client.report [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.863058] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241816, 'name': CreateVM_Task, 'duration_secs': 0.477719} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.863058] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.863058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.863058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.863309] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 820.865250] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294470d8-6c1d-4016-b75e-82f973a004d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.874745] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 820.874745] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52289e8f-48b5-64de-81ef-758440586e99" [ 820.874745] env[69994]: _type = "Task" [ 820.874745] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.885975] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52289e8f-48b5-64de-81ef-758440586e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.942762] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526112ef-0f27-c6fd-9f2d-268b23adc32f/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 820.943759] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fe69b8-5ca0-4c99-a232-0feb8bd4e366 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.956147] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526112ef-0f27-c6fd-9f2d-268b23adc32f/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 820.956427] env[69994]: ERROR oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526112ef-0f27-c6fd-9f2d-268b23adc32f/disk-0.vmdk due to incomplete transfer. [ 820.958539] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b5565d40-9556-4ab5-b740-8ba40b07825d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.968349] env[69994]: DEBUG oslo_vmware.rw_handles [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526112ef-0f27-c6fd-9f2d-268b23adc32f/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 820.970058] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Uploaded image 87980241-04ac-4f68-8c15-4aa5e319a6ef to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 820.973824] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 820.974910] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-096b460f-333e-4e5e-b2d9-6b0e07e9e533 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.986333] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 820.986333] env[69994]: value = "task-3241818" [ 820.986333] env[69994]: _type = "Task" [ 820.986333] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.995992] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241818, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.024897] env[69994]: INFO nova.compute.manager [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Unrescuing [ 821.025271] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.025448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.025621] env[69994]: DEBUG nova.network.neutron [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.037638] env[69994]: INFO nova.compute.manager [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] instance snapshotting [ 821.046157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c9d908-093f-458a-9c43-d11672994bd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.056186] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241817, 'name': ReconfigVM_Task, 'duration_secs': 0.475654} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.071392] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.072844] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81393105-8320-4cf1-a086-d423ac2f544a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.075325] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f875aa7-56b0-4871-b241-1ca1c5233306 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.088120] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 821.088120] env[69994]: value = "task-3241819" [ 821.088120] env[69994]: _type = "Task" [ 821.088120] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.099248] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241819, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.168092] env[69994]: DEBUG nova.network.neutron [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Successfully updated port: b02b6f7a-67ae-46ce-aaa8-77cd472b8714 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.242706] env[69994]: DEBUG nova.network.neutron [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Updated VIF entry in instance network info cache for port 2027b219-02f1-4669-80e5-0d03b45b5562. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.243088] env[69994]: DEBUG nova.network.neutron [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Updating instance_info_cache with network_info: [{"id": "2027b219-02f1-4669-80e5-0d03b45b5562", "address": "fa:16:3e:f9:32:6f", "network": {"id": "e133aea5-e663-453d-a16f-2e1fd777c247", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1465432369-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7422894c60b84a4ba1eb1b4dee4920c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e9fa4744-8702-4973-b911-ee18192a3e4b", "external-id": "nsx-vlan-transportzone-318", "segmentation_id": 318, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2027b219-02", "ovs_interfaceid": "2027b219-02f1-4669-80e5-0d03b45b5562", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.280705] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.739s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.281419] env[69994]: DEBUG nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 821.284659] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.935s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.284659] env[69994]: DEBUG nova.objects.instance [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lazy-loading 'resources' on Instance uuid cef66a67-e3ac-40dc-a8a4-0375bd64c484 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.390784] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52289e8f-48b5-64de-81ef-758440586e99, 'name': SearchDatastore_Task, 'duration_secs': 0.015217} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.391352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.391484] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.391751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.392060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.392129] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.392359] env[69994]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-d48d9b23-b71b-4ca5-b1e8-bdfe3e95d99e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.404656] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.404907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.406030] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4a111eb-1dfb-4837-86db-10e0a0573156 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.412269] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 821.412269] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52169915-1546-e2fd-eb7f-7f6cf7b6e630" [ 821.412269] env[69994]: _type = "Task" [ 821.412269] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.422984] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52169915-1546-e2fd-eb7f-7f6cf7b6e630, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.461669] env[69994]: DEBUG nova.network.neutron [-] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.497379] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241818, 'name': Destroy_Task, 'duration_secs': 0.339146} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.498259] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Destroyed the VM [ 821.498597] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 821.498946] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fa1bcee4-f95d-4de8-8648-34b5e2d96e94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.507194] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 821.507194] env[69994]: value = "task-3241820" [ 821.507194] env[69994]: _type = "Task" [ 821.507194] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.517397] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241820, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.590408] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 821.590777] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3c2d5a3f-bfb2-4e7c-83c9-8c1330265f85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.603547] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241819, 'name': Rename_Task, 'duration_secs': 0.345191} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.605069] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 821.605414] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 821.605414] env[69994]: value = "task-3241821" [ 821.605414] env[69994]: _type = "Task" [ 821.605414] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.605613] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1d866a8-7584-4ed6-aa1a-598bef983991 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.619242] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 821.619242] env[69994]: value = "task-3241822" [ 821.619242] env[69994]: _type = "Task" [ 821.619242] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.622549] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241821, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.631986] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241822, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.672296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "refresh_cache-566522b0-7aa7-4552-9be7-035d742ba394" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.672459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "refresh_cache-566522b0-7aa7-4552-9be7-035d742ba394" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.672617] env[69994]: DEBUG nova.network.neutron [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.749277] env[69994]: DEBUG oslo_concurrency.lockutils [req-7a05a0e4-46b5-426c-b769-efa3c21727e3 req-d3e7652a-6288-4ca5-8556-e2ceb0d6392c service nova] Releasing lock "refresh_cache-2d812174-d2ad-4fac-8ae5-ffa51d691374" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.788685] env[69994]: DEBUG nova.compute.utils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.794587] env[69994]: DEBUG nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 821.794646] env[69994]: DEBUG nova.network.neutron [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.854974] env[69994]: DEBUG nova.network.neutron [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Updating instance_info_cache with network_info: [{"id": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "address": "fa:16:3e:1e:ef:15", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde3e77dc-77", "ovs_interfaceid": "de3e77dc-7712-4e45-b1d3-fd50595cb0f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.857817] env[69994]: DEBUG nova.policy [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aae36590634048a5a1c9911d5a38a872', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6356297e311c4b47b689a7cda41127f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 821.923682] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52169915-1546-e2fd-eb7f-7f6cf7b6e630, 'name': SearchDatastore_Task, 'duration_secs': 0.01238} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.927199] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9707ff1f-d252-4b73-bad9-97fa62dde650 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.932293] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 821.932293] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525d69b1-59de-b442-0bc8-0ff54ee83059" [ 821.932293] env[69994]: _type = "Task" [ 821.932293] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.948576] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525d69b1-59de-b442-0bc8-0ff54ee83059, 'name': SearchDatastore_Task, 'duration_secs': 0.011333} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.948957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.949176] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 2d812174-d2ad-4fac-8ae5-ffa51d691374/2d812174-d2ad-4fac-8ae5-ffa51d691374.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.950030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb17a297-c77b-49f7-9e0d-2e791d5cf644 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.958151] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 821.958151] env[69994]: value = "task-3241823" [ 821.958151] env[69994]: _type = "Task" [ 821.958151] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.965356] env[69994]: INFO nova.compute.manager [-] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Took 1.46 seconds to deallocate network for instance. 
[ 821.971134] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.025982] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241820, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.118569] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241821, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.132220] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241822, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.218994] env[69994]: DEBUG nova.network.neutron [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Successfully created port: 7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 822.238401] env[69994]: DEBUG nova.network.neutron [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.295550] env[69994]: DEBUG nova.compute.utils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 822.361971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-ed662f67-be0e-4f19-bb8a-6af39b4d348c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.362706] env[69994]: DEBUG nova.objects.instance [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lazy-loading 'flavor' on Instance uuid ed662f67-be0e-4f19-bb8a-6af39b4d348c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.385950] env[69994]: DEBUG nova.compute.manager [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Received event network-vif-deleted-ef23e47f-0bc3-4254-bf4a-51e407cd43b7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.385950] env[69994]: DEBUG nova.compute.manager [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Received event network-vif-plugged-b02b6f7a-67ae-46ce-aaa8-77cd472b8714 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.385950] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] Acquiring lock "566522b0-7aa7-4552-9be7-035d742ba394-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.385950] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] Lock "566522b0-7aa7-4552-9be7-035d742ba394-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.385950] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] Lock "566522b0-7aa7-4552-9be7-035d742ba394-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.385950] env[69994]: DEBUG nova.compute.manager [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] No waiting events found dispatching network-vif-plugged-b02b6f7a-67ae-46ce-aaa8-77cd472b8714 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.385950] env[69994]: WARNING nova.compute.manager 
[req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Received unexpected event network-vif-plugged-b02b6f7a-67ae-46ce-aaa8-77cd472b8714 for instance with vm_state building and task_state spawning. [ 822.385950] env[69994]: DEBUG nova.compute.manager [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Received event network-changed-b02b6f7a-67ae-46ce-aaa8-77cd472b8714 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 822.385950] env[69994]: DEBUG nova.compute.manager [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Refreshing instance network info cache due to event network-changed-b02b6f7a-67ae-46ce-aaa8-77cd472b8714. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 822.385950] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] Acquiring lock "refresh_cache-566522b0-7aa7-4552-9be7-035d742ba394" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.429472] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a243a556-a7a7-403a-be46-1300f1ff20ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.440579] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be56e201-8160-4be1-a9e9-216fc2946673 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.483681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.489149] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97321352-e6e8-4819-a8c1-335965a1ef3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.505203] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a64c81-4ca1-4e02-b07c-0170a6266e32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.511321] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241823, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.525483] env[69994]: DEBUG oslo_vmware.api [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241820, 'name': RemoveSnapshot_Task, 'duration_secs': 0.603813} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.537194] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 822.537604] env[69994]: INFO nova.compute.manager [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Took 15.31 seconds to snapshot the instance on the hypervisor. [ 822.541838] env[69994]: DEBUG nova.compute.provider_tree [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.546498] env[69994]: DEBUG nova.network.neutron [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Updating instance_info_cache with network_info: [{"id": "b02b6f7a-67ae-46ce-aaa8-77cd472b8714", "address": "fa:16:3e:c5:0f:fc", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b6f7a-67", "ovs_interfaceid": "b02b6f7a-67ae-46ce-aaa8-77cd472b8714", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.618168] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241821, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.633985] env[69994]: DEBUG oslo_vmware.api [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241822, 'name': PowerOnVM_Task, 'duration_secs': 0.863302} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.633985] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 822.634190] env[69994]: INFO nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Took 9.70 seconds to spawn the instance on the hypervisor. [ 822.634373] env[69994]: DEBUG nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 822.635278] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca362389-fda2-4179-86cd-b90b88b4167d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.778915] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.779181] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.799971] env[69994]: DEBUG nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 822.871130] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46324700-4de9-4c6e-b7bf-591e2949e772 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.891608] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 822.891944] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f474324-d76f-4b3d-b575-afbfeefd15bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.899980] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 822.899980] env[69994]: value = "task-3241824" [ 822.899980] env[69994]: _type = "Task" [ 822.899980] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.908788] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.992341] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688594} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.993020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 2d812174-d2ad-4fac-8ae5-ffa51d691374/2d812174-d2ad-4fac-8ae5-ffa51d691374.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.993020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.993299] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86ac8fc8-6537-4fd7-aaf4-d6a2e68e4df5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.002168] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 823.002168] env[69994]: value = "task-3241825" [ 823.002168] env[69994]: _type = "Task" [ 823.002168] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.011682] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241825, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.050080] env[69994]: DEBUG nova.scheduler.client.report [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.056523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "refresh_cache-566522b0-7aa7-4552-9be7-035d742ba394" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.057240] env[69994]: DEBUG nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Instance network_info: |[{"id": "b02b6f7a-67ae-46ce-aaa8-77cd472b8714", "address": "fa:16:3e:c5:0f:fc", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b6f7a-67", "ovs_interfaceid": "b02b6f7a-67ae-46ce-aaa8-77cd472b8714", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.057848] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] Acquired lock "refresh_cache-566522b0-7aa7-4552-9be7-035d742ba394" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.058071] env[69994]: DEBUG nova.network.neutron [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Refreshing network info cache for port b02b6f7a-67ae-46ce-aaa8-77cd472b8714 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.059819] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:0f:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c66a277b-e3bf-43b8-a632-04fdd0720b91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b02b6f7a-67ae-46ce-aaa8-77cd472b8714', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.068046] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.068908] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.069167] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-341a5ee3-4f37-4e8b-9003-7706d1e895f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.093087] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.093087] env[69994]: value = "task-3241826" [ 823.093087] env[69994]: _type = "Task" [ 823.093087] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.100109] env[69994]: DEBUG nova.compute.manager [None req-a2317e12-cc74-4488-835f-01bec5695f28 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Found 2 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 823.107343] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241826, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.118825] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241821, 'name': CreateSnapshot_Task, 'duration_secs': 1.474834} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.119136] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 823.120037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e66b2f-9965-420d-a906-8d24197937e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.151677] env[69994]: INFO nova.compute.manager [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Took 40.84 seconds to build instance. [ 823.292688] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.292878] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.293030] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.293199] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.293352] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.293526] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.293673] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 823.293854] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 823.411900] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241824, 'name': PowerOffVM_Task, 'duration_secs': 0.401926} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.412189] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.417645] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Reconfiguring VM instance instance-0000002d to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 823.417953] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b7b6b18-7a16-48b1-bf91-842d3241f55d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.439364] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 823.439364] env[69994]: value = "task-3241827" [ 823.439364] env[69994]: _type = "Task" [ 823.439364] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.449719] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241827, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.512728] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074765} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.512728] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.513274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8979c17f-5a0d-4fec-b766-e3cae3bc3a57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.536405] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 2d812174-d2ad-4fac-8ae5-ffa51d691374/2d812174-d2ad-4fac-8ae5-ffa51d691374.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.536704] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce1d218f-7cca-48d5-a049-f3048268487e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.554838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.558457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.879s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.562039] env[69994]: INFO nova.compute.claims [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.564271] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 823.564271] env[69994]: value = "task-3241828" [ 823.564271] env[69994]: _type = "Task" [ 823.564271] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.577989] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241828, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.585843] env[69994]: INFO nova.scheduler.client.report [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Deleted allocations for instance cef66a67-e3ac-40dc-a8a4-0375bd64c484 [ 823.604033] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241826, 'name': CreateVM_Task, 'duration_secs': 0.36987} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.604185] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 823.604759] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.604936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.605291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 823.606135] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a57dbfef-0677-41ec-9886-94ae42a7f5fc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.611801] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 823.611801] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5230e83a-c775-e7e3-2eaa-4e3129db57c9" [ 823.611801] env[69994]: _type = "Task" [ 823.611801] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.626424] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5230e83a-c775-e7e3-2eaa-4e3129db57c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.642467] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 823.643275] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b2531196-2816-4854-ba06-ffe512d85862 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.652938] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 823.652938] env[69994]: value = "task-3241829" [ 823.652938] env[69994]: _type = "Task" [ 823.652938] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.655043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-87f956ae-bb3e-4116-bb80-1a8e42534b05 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.703s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.664986] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241829, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.797982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.812331] env[69994]: DEBUG nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 823.815325] env[69994]: DEBUG nova.network.neutron [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Successfully updated port: 7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.849221] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:38:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='226642232',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1143913746',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 823.849475] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 823.849663] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 823.849943] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 823.850152] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 823.850333] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 823.850554] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 
tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 823.850713] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 823.850900] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 823.851105] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 823.851282] env[69994]: DEBUG nova.virt.hardware [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 823.852669] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61e1e0b-22b8-421c-a6cb-b7b526e25351 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.861909] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f0d8af-4f7b-4eef-adc7-705e62006159 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.955157] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241827, 'name': ReconfigVM_Task, 'duration_secs': 0.483137} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.956194] env[69994]: DEBUG nova.network.neutron [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Updated VIF entry in instance network info cache for port b02b6f7a-67ae-46ce-aaa8-77cd472b8714. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 823.956612] env[69994]: DEBUG nova.network.neutron [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Updating instance_info_cache with network_info: [{"id": "b02b6f7a-67ae-46ce-aaa8-77cd472b8714", "address": "fa:16:3e:c5:0f:fc", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb02b6f7a-67", "ovs_interfaceid": "b02b6f7a-67ae-46ce-aaa8-77cd472b8714", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.957913] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Reconfigured VM instance instance-0000002d to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 823.958610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.958714] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9578b6d4-0289-44ee-9ca7-b071f60411c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.968139] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 823.968139] env[69994]: value = "task-3241830" [ 823.968139] env[69994]: _type = "Task" [ 823.968139] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.979517] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241830, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.084647] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241828, 'name': ReconfigVM_Task, 'duration_secs': 0.295031} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.085663] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 2d812174-d2ad-4fac-8ae5-ffa51d691374/2d812174-d2ad-4fac-8ae5-ffa51d691374.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.086384] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ea9d253-6aff-46dd-a6d6-6b22bb070ad6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.094730] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 824.094730] env[69994]: value = "task-3241831" [ 824.094730] env[69994]: _type = "Task" [ 824.094730] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.101947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-db548667-33dc-4cc5-a718-8be95fc62592 tempest-ServersV294TestFqdnHostnames-1530289671 tempest-ServersV294TestFqdnHostnames-1530289671-project-member] Lock "cef66a67-e3ac-40dc-a8a4-0375bd64c484" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.531s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.107795] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241831, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.124462] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5230e83a-c775-e7e3-2eaa-4e3129db57c9, 'name': SearchDatastore_Task, 'duration_secs': 0.014551} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.124975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.125110] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.125373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.125531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.125743] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.126036] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8e54e32-4a2c-4417-9bce-4ac6d8d9710d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.139064] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.139064] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.139318] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b78404e-c5e5-4ffe-8f79-98dbe66fbfc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.147149] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 824.147149] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ced9ec-4015-85da-9dc4-7ddc81feac40" [ 824.147149] env[69994]: _type = "Task" [ 824.147149] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.158025] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ced9ec-4015-85da-9dc4-7ddc81feac40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.161205] env[69994]: DEBUG nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 824.171048] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241829, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.228095] env[69994]: DEBUG nova.compute.manager [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.229235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d77d49-f143-4775-80bc-525ab23ce74e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.320780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.320780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.320780] env[69994]: DEBUG nova.network.neutron [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.460031] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc6d3fb5-3333-4cc3-824c-42798195c370 req-4439f50b-450d-4567-82f1-4acbb911cd88 service nova] Releasing lock "refresh_cache-566522b0-7aa7-4552-9be7-035d742ba394" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.479762] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241830, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.605787] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241831, 'name': Rename_Task, 'duration_secs': 0.150596} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.609658] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.610338] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6661a8c6-0425-4d86-bfd5-0eda870e5d38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.617155] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 824.617155] env[69994]: value = "task-3241832" [ 824.617155] env[69994]: _type = "Task" [ 824.617155] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.628288] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241832, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.659343] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ced9ec-4015-85da-9dc4-7ddc81feac40, 'name': SearchDatastore_Task, 'duration_secs': 0.015263} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.666591] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1932d51c-02f5-4276-9f2d-a64335eb89aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.679526] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241829, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.681131] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 824.681131] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f90e0b-6de7-9d9c-f2bd-2fb33ee3adbb" [ 824.681131] env[69994]: _type = "Task" [ 824.681131] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.687894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.694234] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f90e0b-6de7-9d9c-f2bd-2fb33ee3adbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.742252] env[69994]: INFO nova.compute.manager [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] instance snapshotting [ 824.742822] env[69994]: DEBUG nova.objects.instance [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'flavor' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 824.900492] env[69994]: DEBUG nova.network.neutron [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.980831] env[69994]: DEBUG oslo_vmware.api [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241830, 'name': PowerOnVM_Task, 'duration_secs': 0.57342} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.986251] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.986251] env[69994]: DEBUG nova.compute.manager [None req-b8a3c1a4-9c7b-46ee-b89e-054966c1ec57 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.986251] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ffcdcb-134e-40ca-92a7-8b0166e1839a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.126559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546cfb20-97c0-45d7-b874-1bdadc3c9154 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.141122] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bc5df7-2528-4a33-9052-d116d99f936c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.144544] env[69994]: DEBUG oslo_vmware.api [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241832, 'name': PowerOnVM_Task, 'duration_secs': 0.47541} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.145011] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.145272] env[69994]: INFO nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Took 7.22 seconds to spawn the instance on the hypervisor. 
[ 825.145488] env[69994]: DEBUG nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.146653] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc97a47-dbb8-42f8-9726-5a7124b0fe30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.181602] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68036c5-f3bd-446b-8185-65d5a716e43c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.202068] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241829, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.206333] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7de622-a8d4-4dc9-8005-9dc3c2808db7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.213878] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f90e0b-6de7-9d9c-f2bd-2fb33ee3adbb, 'name': SearchDatastore_Task, 'duration_secs': 0.022815} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.214688] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.215094] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 566522b0-7aa7-4552-9be7-035d742ba394/566522b0-7aa7-4552-9be7-035d742ba394.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.215477] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f00e93f-f64f-4daf-b147-e6039daee2c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.227314] env[69994]: DEBUG nova.compute.provider_tree [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.234914] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 825.234914] env[69994]: value = "task-3241833" [ 825.234914] env[69994]: _type = "Task" [ 825.234914] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.249386] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241833, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.250312] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ee0ef7-6137-451d-9d77-ace9b9da9848 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.272744] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe6780a-2ca5-4702-9be6-f7f7aa406efc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.283070] env[69994]: DEBUG nova.network.neutron [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Updating instance_info_cache with network_info: [{"id": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "address": "fa:16:3e:97:15:a8", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7c4dd0-5c", "ovs_interfaceid": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.688380] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241829, 'name': CloneVM_Task, 'duration_secs': 1.767892} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.688829] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Created linked-clone VM from snapshot [ 825.689505] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a914bb2f-0712-41d4-88a4-6ee30e6add2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.698443] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Uploading image c4563fd6-35f0-4287-8914-68792da7b7ee {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 825.708324] env[69994]: INFO nova.compute.manager [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Took 42.17 seconds to build instance. [ 825.712928] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 825.712928] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5e74de4c-4eb9-4b16-a0b0-9c127f95c687 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.722870] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 825.722870] env[69994]: value = "task-3241834" [ 825.722870] env[69994]: _type = "Task" [ 825.722870] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.735524] env[69994]: DEBUG nova.scheduler.client.report [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.739138] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241834, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.752847] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241833, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.786168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Releasing lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.786591] env[69994]: DEBUG nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Instance network_info: |[{"id": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "address": "fa:16:3e:97:15:a8", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7c4dd0-5c", "ovs_interfaceid": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 825.787575] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 825.788058] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:15:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '274afb4c-04df-4213-8ad2-8f48a10d78a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f7c4dd0-5c90-4dd1-8113-b871712bb2f7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.795650] env[69994]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.795863] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-274a1159-9d36-4fb4-a4d7-286ea2cb7e77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.798639] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.798893] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7ab1338-a0c9-4ded-a59a-9302e8d1769c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.819897] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 825.819897] env[69994]: value = "task-3241835" [ 825.819897] env[69994]: _type = "Task" [ 825.819897] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.821270] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.821270] env[69994]: value = "task-3241836" [ 825.821270] env[69994]: _type = "Task" [ 825.821270] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.834330] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241835, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.849842] env[69994]: DEBUG nova.compute.manager [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Received event network-vif-plugged-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.849842] env[69994]: DEBUG oslo_concurrency.lockutils [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] Acquiring lock "a4544bc9-6935-4825-9b45-2054d2ced330-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.849842] env[69994]: DEBUG oslo_concurrency.lockutils [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] Lock "a4544bc9-6935-4825-9b45-2054d2ced330-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.849842] env[69994]: DEBUG oslo_concurrency.lockutils [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] Lock "a4544bc9-6935-4825-9b45-2054d2ced330-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.849842] env[69994]: DEBUG nova.compute.manager [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] No waiting events found dispatching network-vif-plugged-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 825.849842] env[69994]: WARNING nova.compute.manager [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Received unexpected event network-vif-plugged-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 for instance with vm_state building and task_state spawning. [ 825.849842] env[69994]: DEBUG nova.compute.manager [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Received event network-changed-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.849842] env[69994]: DEBUG nova.compute.manager [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Refreshing instance network info cache due to event network-changed-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 825.850167] env[69994]: DEBUG oslo_concurrency.lockutils [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] Acquiring lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.850167] env[69994]: DEBUG oslo_concurrency.lockutils [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] Acquired lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.850385] env[69994]: DEBUG nova.network.neutron [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Refreshing network info cache for port 7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.212338] env[69994]: DEBUG oslo_concurrency.lockutils [None req-70e8d25d-0ab3-433a-997a-3bd3efa4d58d tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.867s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.235788] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241834, 'name': Destroy_Task, 'duration_secs': 0.477118} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.238124] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Destroyed the VM [ 826.238447] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 826.238975] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e7e6eec3-1039-4538-92e4-f98c94e08b23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.248637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.250151] env[69994]: DEBUG nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.256199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.093s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.256442] env[69994]: DEBUG nova.objects.instance [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lazy-loading 'resources' on Instance uuid 0b284e71-7af2-4782-b950-4f7eac5221a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.258053] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 826.258053] env[69994]: value = "task-3241837" [ 826.258053] env[69994]: _type = "Task" [ 826.258053] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.271193] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241833, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534747} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.271881] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 566522b0-7aa7-4552-9be7-035d742ba394/566522b0-7aa7-4552-9be7-035d742ba394.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.272253] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.273096] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21069edf-e88e-46b2-a717-f7760a005656 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.279052] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241837, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.286757] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 826.286757] env[69994]: value = "task-3241838" [ 826.286757] env[69994]: _type = "Task" [ 826.286757] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.301525] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241838, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.339101] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241835, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.342693] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241836, 'name': CreateVM_Task, 'duration_secs': 0.399594} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.342981] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.343611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.345194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.345194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 826.345640] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af43cee6-9b35-4bfc-ad59-9b06c1f2578f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.350881] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "7ef329a2-4d61-428a-8a43-f309a1e953d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.351219] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.361113] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 826.361113] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a1d49b-cdc1-b422-dfd9-8494e09bf389" [ 826.361113] env[69994]: _type = "Task" [ 826.361113] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.370746] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a1d49b-cdc1-b422-dfd9-8494e09bf389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.715585] env[69994]: DEBUG nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.758230] env[69994]: DEBUG nova.compute.utils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 826.759941] env[69994]: DEBUG nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 826.760272] env[69994]: DEBUG nova.network.neutron [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 826.792025] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241837, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.803129] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241838, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09912} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.807401] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.807401] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c05e483-f7bd-4647-91ce-097e7bd2d9be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.835557] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 566522b0-7aa7-4552-9be7-035d742ba394/566522b0-7aa7-4552-9be7-035d742ba394.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.838474] env[69994]: DEBUG nova.policy [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '464e2e64f77042f69423965a2694b159', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95dff361679f4d3eb08daf6701c7ab82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 826.847073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f00b4d58-8420-4c32-8ab6-6f7354098d12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.875321] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241835, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.877374] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 826.877374] env[69994]: value = "task-3241839" [ 826.877374] env[69994]: _type = "Task" [ 826.877374] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.885205] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a1d49b-cdc1-b422-dfd9-8494e09bf389, 'name': SearchDatastore_Task, 'duration_secs': 0.014494} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.886131] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.886528] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.887199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.887436] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.887728] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.891398] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f4ef514-888c-4134-b237-bc0265d55c93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.898626] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241839, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.909509] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.909734] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.913751] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5df9fc8b-231c-4ca7-b1d8-83c251ad9dab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.921154] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 826.921154] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525e0ca9-72d7-e06e-651e-0dd49faf7a60" [ 826.921154] env[69994]: _type = "Task" [ 826.921154] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.931733] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525e0ca9-72d7-e06e-651e-0dd49faf7a60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.094925] env[69994]: DEBUG nova.network.neutron [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Updated VIF entry in instance network info cache for port 7f7c4dd0-5c90-4dd1-8113-b871712bb2f7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.095201] env[69994]: DEBUG nova.network.neutron [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Updating instance_info_cache with network_info: [{"id": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "address": "fa:16:3e:97:15:a8", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7c4dd0-5c", "ovs_interfaceid": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.171223] env[69994]: DEBUG nova.network.neutron [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] 
Successfully created port: 58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.237760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.272369] env[69994]: DEBUG nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.285580] env[69994]: DEBUG oslo_vmware.api [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241837, 'name': RemoveSnapshot_Task, 'duration_secs': 0.867049} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.285909] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 827.337415] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241835, 'name': CreateSnapshot_Task, 'duration_secs': 1.127964} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.337842] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 827.338474] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b76d3d-1346-438d-805d-15c119c26fd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.394837] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241839, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.441155] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525e0ca9-72d7-e06e-651e-0dd49faf7a60, 'name': SearchDatastore_Task, 'duration_secs': 0.015745} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.442145] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13dbe03c-8ace-4633-8e33-c9475bffd106 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.454655] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 827.454655] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5299618d-60cd-f815-5eee-2b866ea9f652" [ 827.454655] env[69994]: _type = "Task" [ 827.454655] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.465522] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5299618d-60cd-f815-5eee-2b866ea9f652, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.508630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e01ab32-f7d1-4a7a-aa44-28419a920303 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.517162] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb833b1-2647-4d16-b9de-4df4d692b007 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.552805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f106b1c-a45c-4092-95d9-97674c4d2a3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.561459] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1ab582-9bde-4350-b3e7-676b4337666c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.576331] env[69994]: DEBUG nova.compute.provider_tree [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.598214] env[69994]: DEBUG oslo_concurrency.lockutils [req-da7be007-3a1d-47b8-abe3-14a03864ca21 req-dce25ae1-1e68-45c8-9c8f-89b3bad5c961 service nova] Releasing lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.790851] env[69994]: WARNING nova.compute.manager [None req-8f774b04-e91f-4bbc-8786-eb72dd23037a tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Image not found during snapshot: nova.exception.ImageNotFound: Image 
c4563fd6-35f0-4287-8914-68792da7b7ee could not be found. [ 827.861268] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 827.861759] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cd0dc971-74a9-48b8-83d0-20a7f7a2ffeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.874994] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 827.874994] env[69994]: value = "task-3241840" [ 827.874994] env[69994]: _type = "Task" [ 827.874994] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.890287] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.895363] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241839, 'name': ReconfigVM_Task, 'duration_secs': 0.657395} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.898064] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 566522b0-7aa7-4552-9be7-035d742ba394/566522b0-7aa7-4552-9be7-035d742ba394.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.900340] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94f7de98-1074-4ff4-85ed-d9d1d5db0cd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.908092] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 827.908092] env[69994]: value = "task-3241841" [ 827.908092] env[69994]: _type = "Task" [ 827.908092] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.916929] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241841, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.968184] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5299618d-60cd-f815-5eee-2b866ea9f652, 'name': SearchDatastore_Task, 'duration_secs': 0.013496} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.968699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.969084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] a4544bc9-6935-4825-9b45-2054d2ced330/a4544bc9-6935-4825-9b45-2054d2ced330.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.969646] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8c88f7d-fb11-4df7-b733-6ca9ec9d8786 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.980451] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 827.980451] env[69994]: value = "task-3241842" [ 827.980451] env[69994]: _type = "Task" [ 827.980451] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.992019] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241842, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.082382] env[69994]: DEBUG nova.scheduler.client.report [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.285305] env[69994]: DEBUG nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.320266] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.320534] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.320811] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.320897] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.321251] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.321473] 
env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.321822] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.321940] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.322165] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.322490] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.322589] env[69994]: DEBUG nova.virt.hardware [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.323594] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae98d69e-871c-414e-847c-7d28a2da0209 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.333803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794c6bfe-53fa-440c-830b-7b9161a084f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.388616] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.422541] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241841, 'name': Rename_Task, 'duration_secs': 0.372326} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.422541] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.422541] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0ec03d3-4cf1-461e-97f1-0eb824919108 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.431739] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 828.431739] env[69994]: value = "task-3241843" [ 828.431739] env[69994]: _type = "Task" [ 828.431739] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.442624] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241843, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.491458] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241842, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.588386] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.332s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.592649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.922s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.595417] env[69994]: DEBUG nova.objects.instance [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lazy-loading 'resources' on Instance uuid 744fe018-d12c-44c2-98f1-c11fbfffc98e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.625518] env[69994]: INFO nova.scheduler.client.report [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Deleted allocations for instance 0b284e71-7af2-4782-b950-4f7eac5221a4 [ 828.888699] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.943871] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241843, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.958320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "dca638aa-c491-431f-a0e5-d02bd76705ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.958320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.958320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "dca638aa-c491-431f-a0e5-d02bd76705ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.958320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.958320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.959863] env[69994]: DEBUG nova.network.neutron [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Successfully updated port: 58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.961303] env[69994]: INFO nova.compute.manager [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Terminating instance [ 828.992795] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241842, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622665} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.992795] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] a4544bc9-6935-4825-9b45-2054d2ced330/a4544bc9-6935-4825-9b45-2054d2ced330.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.992795] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.992795] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c34351e8-9cf3-4749-a937-84d9366d83f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.001142] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 829.001142] env[69994]: value = "task-3241844" [ 829.001142] env[69994]: _type = "Task" [ 829.001142] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.015697] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241844, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.140479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-47d7b11d-1c8e-431a-a83f-29066a332817 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.420s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.140996] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 34.712s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.141231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.141430] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.141591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.147381] env[69994]: INFO nova.compute.manager [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Terminating instance [ 829.160173] env[69994]: DEBUG nova.compute.manager [req-cf0c475e-5edc-47c1-97d1-62bee80195bb req-4648116a-03e7-4d3a-afd4-a69eed4cad54 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Received event network-vif-plugged-58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 829.160400] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf0c475e-5edc-47c1-97d1-62bee80195bb req-4648116a-03e7-4d3a-afd4-a69eed4cad54 service nova] Acquiring lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
829.160609] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf0c475e-5edc-47c1-97d1-62bee80195bb req-4648116a-03e7-4d3a-afd4-a69eed4cad54 service nova] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.160777] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf0c475e-5edc-47c1-97d1-62bee80195bb req-4648116a-03e7-4d3a-afd4-a69eed4cad54 service nova] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.160986] env[69994]: DEBUG nova.compute.manager [req-cf0c475e-5edc-47c1-97d1-62bee80195bb req-4648116a-03e7-4d3a-afd4-a69eed4cad54 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] No waiting events found dispatching network-vif-plugged-58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 829.161293] env[69994]: WARNING nova.compute.manager [req-cf0c475e-5edc-47c1-97d1-62bee80195bb req-4648116a-03e7-4d3a-afd4-a69eed4cad54 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Received unexpected event network-vif-plugged-58907f85-0b65-4837-9d8e-da4ed1cf1be6 for instance with vm_state building and task_state spawning. [ 829.405294] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.445540] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241843, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.465615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.465615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.465615] env[69994]: DEBUG nova.network.neutron [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.466973] env[69994]: DEBUG nova.compute.manager [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 829.467211] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 829.468076] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41800827-a73d-48ba-9f3a-60236e4a2787 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.482347] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 829.486392] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eed4aa28-95b4-4904-9fd1-205c7eae4f2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.497129] env[69994]: DEBUG oslo_vmware.api [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 829.497129] env[69994]: value = "task-3241845" [ 829.497129] env[69994]: _type = "Task" [ 829.497129] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.516183] env[69994]: DEBUG oslo_vmware.api [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.524565] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241844, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156578} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.524917] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.527395] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cb5fa9-fa8d-4d74-97de-0a9ae7ed8717 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.560069] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] a4544bc9-6935-4825-9b45-2054d2ced330/a4544bc9-6935-4825-9b45-2054d2ced330.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.563121] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3efce16-0ef8-4ef8-bea1-dde0e2816ee5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.584660] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 829.584660] env[69994]: value = "task-3241846" [ 829.584660] env[69994]: _type = "Task" [ 829.584660] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.596465] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241846, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.651939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.652198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquired lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.652388] env[69994]: DEBUG nova.network.neutron [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.817561] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d950dc-1f39-4459-b791-428d8e08ad12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.828407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a9748d-8de7-47bc-8630-4aa5e4bc6e16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.868533] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c69cd85-8686-44df-a7d2-f418bee9dc47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.877423] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12833595-4752-438d-83fc-5ce89f211b0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.896060] env[69994]: DEBUG nova.compute.provider_tree [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.900601] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.943378] env[69994]: DEBUG oslo_vmware.api [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241843, 'name': PowerOnVM_Task, 'duration_secs': 1.112465} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.943657] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.943863] env[69994]: INFO nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Took 9.38 seconds to spawn the instance on the hypervisor. [ 829.944060] env[69994]: DEBUG nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.944859] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e3b42b-1308-425e-b020-4b691527b6ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.012323] env[69994]: DEBUG oslo_vmware.api [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241845, 'name': PowerOffVM_Task, 'duration_secs': 0.362668} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.012646] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.012843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.013133] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6c9c4b7-4b1d-430a-befe-34eabf063519 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.023624] env[69994]: DEBUG nova.network.neutron [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.080554] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 830.080756] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 830.081071] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleting the datastore file [datastore1] dca638aa-c491-431f-a0e5-d02bd76705ad {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.081333] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae5e9846-ab0f-40c1-b7ad-863049d366d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.090736] env[69994]: DEBUG oslo_vmware.api [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 830.090736] env[69994]: value = "task-3241848" [ 830.090736] env[69994]: _type = "Task" [ 830.090736] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.097195] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241846, 'name': ReconfigVM_Task, 'duration_secs': 0.444965} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.097789] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Reconfigured VM instance instance-00000036 to attach disk [datastore1] a4544bc9-6935-4825-9b45-2054d2ced330/a4544bc9-6935-4825-9b45-2054d2ced330.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.098148] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=69994) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 830.098831] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-5e70bab4-8345-491e-bc19-0f4bee910c98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.103690] env[69994]: DEBUG oslo_vmware.api [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241848, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.109125] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 830.109125] env[69994]: value = "task-3241849" [ 830.109125] env[69994]: _type = "Task" [ 830.109125] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.119659] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241849, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.157319] env[69994]: DEBUG nova.compute.utils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Can not refresh info_cache because instance was not found {{(pid=69994) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 830.185128] env[69994]: DEBUG nova.network.neutron [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.212047] env[69994]: DEBUG nova.network.neutron [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updating instance_info_cache with network_info: [{"id": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "address": "fa:16:3e:bf:d5:a6", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58907f85-0b", "ovs_interfaceid": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.337239] env[69994]: DEBUG nova.network.neutron [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.395181] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.402624] env[69994]: DEBUG nova.scheduler.client.report [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.466129] env[69994]: INFO nova.compute.manager [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Took 42.33 seconds to build instance. 
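Editor's aside, not part of the captured log: the entries above and below repeatedly show oslo_vmware's task handling — a Task moref is returned, wait_for_task blocks while _poll_task logs progress percentages, and the finished task is logged with its duration_secs. A minimal caller-side sketch of that pattern follows, assuming an already-created oslo_vmware VMwareAPISession called `session` and a task reference `task_ref` (both placeholders, not taken from this log):

# Sketch only: waiting on a vCenter task, matching the
# "Waiting for the task ... to complete" and
# "Task: {'id': ..., 'duration_secs': ...} completed successfully" entries.
from oslo_vmware import exceptions as vexc


def wait_for_vcenter_task(session, task_ref):
    """Block until task_ref finishes; session is an oslo_vmware VMwareAPISession."""
    try:
        # wait_for_task polls the task state server-side (the _poll_task
        # debug lines above) and returns the TaskInfo once it succeeds.
        return session.wait_for_task(task_ref)
    except vexc.VMwareDriverException:
        # Raised when the task ends in an error state; callers such as the
        # destroy/clone paths seen in this log decide whether to retry.
        raise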
[ 830.603649] env[69994]: DEBUG oslo_vmware.api [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3241848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197031} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.603649] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.603907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 830.603907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 830.604076] env[69994]: INFO nova.compute.manager [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Took 1.14 seconds to destroy the instance on the hypervisor. [ 830.604344] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 830.604594] env[69994]: DEBUG nova.compute.manager [-] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 830.604658] env[69994]: DEBUG nova.network.neutron [-] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 830.620563] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241849, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.059727} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.620851] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=69994) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 830.621666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08712dcd-a331-4548-9aa9-d4d078b57e7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.654914] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] a4544bc9-6935-4825-9b45-2054d2ced330/ephemeral_0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 830.657485] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5e8cd02-2912-4cf5-a7bc-40643753e316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.682146] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 830.682146] env[69994]: value = "task-3241850" [ 830.682146] env[69994]: _type = "Task" [ 830.682146] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.695188] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241850, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.715615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Releasing lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.715972] env[69994]: DEBUG nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Instance network_info: |[{"id": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "address": "fa:16:3e:bf:d5:a6", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58907f85-0b", "ovs_interfaceid": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 830.716425] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:d5:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58907f85-0b65-4837-9d8e-da4ed1cf1be6', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 830.724407] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Creating folder: Project (95dff361679f4d3eb08daf6701c7ab82). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.724528] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4e646d7-f3db-4add-93e5-6fe91d39ddf4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.737917] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Created folder: Project (95dff361679f4d3eb08daf6701c7ab82) in parent group-v647729. [ 830.738139] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Creating folder: Instances. Parent ref: group-v647896. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 830.738405] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b066146-6120-4461-9140-9ae87fdb77dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.752216] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Created folder: Instances in parent group-v647896. [ 830.752486] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 830.752783] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 830.752989] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-094755fe-55f3-4f9c-b350-beafc1eb8c63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.775030] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 830.775030] env[69994]: value = "task-3241853" [ 830.775030] env[69994]: _type = "Task" [ 830.775030] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.790970] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241853, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.840737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Releasing lock "refresh_cache-0b284e71-7af2-4782-b950-4f7eac5221a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.841532] env[69994]: DEBUG nova.compute.manager [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 830.841849] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.842279] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd4dc571-f18f-41f6-84c1-438b76afcb02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.854256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fda43c-d46d-49b8-87f5-c50f11be99f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.898411] env[69994]: WARNING nova.virt.vmwareapi.vmops [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b284e71-7af2-4782-b950-4f7eac5221a4 could not be found. [ 830.898743] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 830.898838] env[69994]: INFO nova.compute.manager [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Took 0.06 seconds to destroy the instance on the hypervisor. [ 830.899312] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 830.902908] env[69994]: DEBUG nova.compute.manager [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 830.903080] env[69994]: DEBUG nova.network.neutron [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 830.913295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.321s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.915968] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.916218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.684s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.917896] env[69994]: INFO nova.compute.claims [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.925914] env[69994]: DEBUG nova.network.neutron [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.951058] env[69994]: INFO nova.scheduler.client.report [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Deleted allocations for instance 744fe018-d12c-44c2-98f1-c11fbfffc98e [ 830.970243] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b95eabb0-cf95-4480-b920-b39d5319cdd6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "566522b0-7aa7-4552-9be7-035d742ba394" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.048s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.039276] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ef410b09-8686-409e-8391-d50cd0e0df04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.039732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.192079] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241850, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.286105] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241853, 'name': CreateVM_Task, 'duration_secs': 0.449728} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.286467] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 831.287401] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.287696] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.288116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 831.288522] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ce1a5c8-03ba-4a6c-8ec9-0e744409932c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.294543] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 831.294543] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523a012c-d987-8787-cf79-64c21cef7c59" [ 831.294543] env[69994]: _type = "Task" [ 831.294543] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.304377] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523a012c-d987-8787-cf79-64c21cef7c59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.409164] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.427957] env[69994]: DEBUG nova.network.neutron [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.461981] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7e6325bb-69fd-4ef8-a966-253a88f6f761 tempest-ServersTestManualDisk-1627606246 tempest-ServersTestManualDisk-1627606246-project-member] Lock "744fe018-d12c-44c2-98f1-c11fbfffc98e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.661s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.474068] env[69994]: DEBUG nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 831.480200] env[69994]: DEBUG nova.network.neutron [-] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.661070] env[69994]: DEBUG nova.compute.manager [req-151b0708-8997-4340-878f-0c73cd7828db req-bc2e72e2-7cb4-4ea2-9d16-b3cbbef80023 service nova] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Received event network-vif-deleted-7480edff-8738-4f3c-9cd6-e7d4036d475e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.695563] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241850, 'name': ReconfigVM_Task, 'duration_secs': 0.560108} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.695860] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Reconfigured VM instance instance-00000036 to attach disk [datastore1] a4544bc9-6935-4825-9b45-2054d2ced330/ephemeral_0.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 831.696542] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20e993b8-7e8c-4710-8ca7-d38be16cfeb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.704639] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 831.704639] env[69994]: value = "task-3241854" [ 831.704639] env[69994]: _type = "Task" [ 831.704639] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.714928] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241854, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.730256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "1d548f54-4ffa-4299-9212-717350558ad4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.730532] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "1d548f54-4ffa-4299-9212-717350558ad4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.730706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "1d548f54-4ffa-4299-9212-717350558ad4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.730926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "1d548f54-4ffa-4299-9212-717350558ad4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.731092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "1d548f54-4ffa-4299-9212-717350558ad4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.734800] env[69994]: INFO nova.compute.manager [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Terminating instance [ 831.749038] env[69994]: DEBUG nova.compute.manager [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Received event network-changed-58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.749038] env[69994]: DEBUG 
nova.compute.manager [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Refreshing instance network info cache due to event network-changed-58907f85-0b65-4837-9d8e-da4ed1cf1be6. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 831.749038] env[69994]: DEBUG oslo_concurrency.lockutils [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] Acquiring lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.749038] env[69994]: DEBUG oslo_concurrency.lockutils [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] Acquired lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.749038] env[69994]: DEBUG nova.network.neutron [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Refreshing network info cache for port 58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.807447] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523a012c-d987-8787-cf79-64c21cef7c59, 'name': SearchDatastore_Task, 'duration_secs': 0.010002} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.807819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.808153] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.808360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.808575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.808841] env[69994]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.809195] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f91b2c71-c258-4afd-bee0-a11ceeb6acdf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.821755] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.821930] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.822697] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f6be897-6bf1-474e-9a4a-b8111b71989f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.830394] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 831.830394] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dedc51-22f3-c4c7-ab3c-7fe1b61d3279" [ 831.830394] env[69994]: _type = "Task" [ 831.830394] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.842523] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dedc51-22f3-c4c7-ab3c-7fe1b61d3279, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.909694] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.930487] env[69994]: INFO nova.compute.manager [-] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Took 1.03 seconds to deallocate network for instance. [ 831.982233] env[69994]: INFO nova.compute.manager [-] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Took 1.38 seconds to deallocate network for instance. 
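Editor's aside, not part of the captured log: many of the surrounding entries are oslo.concurrency lock traces ("Acquiring lock ...", 'Lock ... acquired ... :: waited Ns', '"released" ... :: held Ns'). A minimal sketch of the two idioms that emit them, with placeholder lock names and empty critical sections (nothing here is copied from Nova's actual code paths):

# Sketch only: the oslo.concurrency usage behind the lock traces above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim(tracker, instance):
    # The synchronized decorator emits the 'Lock "compute_resources" acquired
    # by ... :: waited Ns' and '"released" ... :: held Ns' entries.
    pass  # placeholder for the resource tracker's claim logic


def refresh_cache(instance_uuid):
    # The context-manager form emits the plain Acquiring/Acquired/Releasing
    # lock entries used around the refresh_cache-<uuid> lock.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder critical section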
[ 832.003930] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.227297] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241854, 'name': Rename_Task, 'duration_secs': 0.177299} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.227297] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.227297] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89501832-3651-4cd6-9216-b516c0293516 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.237633] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 832.237633] env[69994]: value = "task-3241855" [ 832.237633] env[69994]: _type = "Task" [ 832.237633] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.244172] env[69994]: DEBUG nova.compute.manager [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 832.244399] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 832.246286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4e6176-6cf0-4ddd-bcc9-e85a64a9f05f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.256329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.260357] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04e369da-76d0-4c77-9cff-69088dd8bcd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.262315] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.270393] env[69994]: DEBUG oslo_vmware.api [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 832.270393] env[69994]: value = "task-3241856" [ 832.270393] env[69994]: _type = "Task" [ 832.270393] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.282224] env[69994]: DEBUG oslo_vmware.api [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241856, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.350419] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dedc51-22f3-c4c7-ab3c-7fe1b61d3279, 'name': SearchDatastore_Task, 'duration_secs': 0.011267} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.352428] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19138f24-bcbf-405e-89f8-9f1dae9fd367 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.362040] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 832.362040] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52050f27-958e-71d3-a236-ebde8175d0b1" [ 832.362040] env[69994]: _type = "Task" [ 832.362040] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.382347] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52050f27-958e-71d3-a236-ebde8175d0b1, 'name': SearchDatastore_Task, 'duration_secs': 0.012813} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.386162] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.386496] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf/8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.389023] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52dee2b0-4596-4b84-8067-a8211fd9a197 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.398696] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 832.398696] env[69994]: value = "task-3241857" [ 832.398696] env[69994]: _type = "Task" [ 832.398696] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.427206] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241857, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.428691] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.440817] env[69994]: INFO nova.compute.manager [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance disappeared during terminate [ 832.441145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c1399444-3dc1-4821-a62a-2baf311e4790 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "0b284e71-7af2-4782-b950-4f7eac5221a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.300s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.492405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.647509] env[69994]: DEBUG nova.network.neutron [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updated VIF entry in instance network info cache for port 58907f85-0b65-4837-9d8e-da4ed1cf1be6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.647858] env[69994]: DEBUG nova.network.neutron [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updating instance_info_cache with network_info: [{"id": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "address": "fa:16:3e:bf:d5:a6", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58907f85-0b", "ovs_interfaceid": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.729720] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6266557-99f9-448b-ad53-770ef3971da6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.748404] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a1434d-ce21-45a5-8f58-572c9b16c628 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.756998] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "ff645ae7-940e-4842-8915-a96d36d08067" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.757260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "ff645ae7-940e-4842-8915-a96d36d08067" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.764064] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241855, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.799372] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0834fe2-f7a1-4c40-af5d-aa5a9e572363 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.811436] env[69994]: DEBUG oslo_vmware.api [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241856, 'name': PowerOffVM_Task, 'duration_secs': 0.244642} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.811940] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 832.812191] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 832.813476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4827616-e41e-4603-b438-c534df346eda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.818866] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8991cce8-78c6-4785-9d86-eccabb248ba0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.833059] env[69994]: DEBUG nova.compute.provider_tree [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.899719] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 832.903048] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 832.903048] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Deleting the datastore file [datastore1] 1d548f54-4ffa-4299-9212-717350558ad4 {{(pid=69994) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.904161] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87138a36-2225-4895-a9a2-d47809cb77f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.916439] env[69994]: DEBUG oslo_vmware.api [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for the task: (returnval){ [ 832.916439] env[69994]: value = "task-3241859" [ 832.916439] env[69994]: _type = "Task" [ 832.916439] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.916807] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.925240] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241840, 'name': CloneVM_Task, 'duration_secs': 4.941785} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.928895] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Created linked-clone VM from snapshot [ 832.929243] env[69994]: DEBUG oslo_vmware.api [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241859, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.930131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67660cce-8cfb-4ed6-9e09-6acd6decc74c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.940705] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Uploading image 40acda84-5087-4e7b-a84d-2631d6a2f2c4 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 832.989363] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 832.989363] env[69994]: value = "vm-647895" [ 832.989363] env[69994]: _type = "VirtualMachine" [ 832.989363] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 832.989801] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9c21a59e-0e25-477f-a03c-2c6c9da4e4e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.001747] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease: (returnval){ [ 833.001747] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5234e2ad-271d-966d-a872-6f3a4cf7feec" [ 833.001747] env[69994]: _type = "HttpNfcLease" [ 833.001747] env[69994]: } obtained for exporting VM: (result){ [ 833.001747] env[69994]: value = "vm-647895" [ 833.001747] env[69994]: _type = "VirtualMachine" [ 833.001747] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 833.002342] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the lease: (returnval){ [ 833.002342] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5234e2ad-271d-966d-a872-6f3a4cf7feec" [ 833.002342] env[69994]: _type = "HttpNfcLease" [ 833.002342] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 833.015215] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 833.015215] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5234e2ad-271d-966d-a872-6f3a4cf7feec" [ 833.015215] env[69994]: _type = "HttpNfcLease" [ 833.015215] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 833.151552] env[69994]: DEBUG oslo_concurrency.lockutils [req-dd7569de-8ad6-4ec6-bef4-eb7f7121c24f req-6be2471a-e4a1-4c60-9ba6-e6ca00289690 service nova] Releasing lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.258334] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241855, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.338675] env[69994]: DEBUG nova.scheduler.client.report [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.413919] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.951885} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.414320] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf/8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 833.414557] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 833.414914] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84c5c843-e7d7-4cb1-9239-999eed9c84ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.427426] env[69994]: DEBUG oslo_vmware.api [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Task: {'id': task-3241859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273044} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.429173] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.429427] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 833.429715] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.430811] env[69994]: INFO nova.compute.manager [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 833.430811] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.430811] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 833.430811] env[69994]: value = "task-3241861" [ 833.430811] env[69994]: _type = "Task" [ 833.430811] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.431169] env[69994]: DEBUG nova.compute.manager [-] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 833.431169] env[69994]: DEBUG nova.network.neutron [-] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 833.449943] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241861, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.516931] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 833.516931] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5234e2ad-271d-966d-a872-6f3a4cf7feec" [ 833.516931] env[69994]: _type = "HttpNfcLease" [ 833.516931] env[69994]: } is ready. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 833.517275] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 833.517275] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5234e2ad-271d-966d-a872-6f3a4cf7feec" [ 833.517275] env[69994]: _type = "HttpNfcLease" [ 833.517275] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 833.522152] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2ddf7d-a655-400d-8882-a4faf815f740 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.532864] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524b7ed1-1fa8-c744-0275-6664f7283c16/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 833.532917] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524b7ed1-1fa8-c744-0275-6664f7283c16/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 833.755372] env[69994]: DEBUG oslo_vmware.api [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3241855, 'name': PowerOnVM_Task, 'duration_secs': 1.312044} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.757887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.757887] env[69994]: INFO nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Took 9.94 seconds to spawn the instance on the hypervisor. 
[ 833.757887] env[69994]: DEBUG nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 833.757887] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756e7359-4816-431d-8696-b142c8ea211f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.775873] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-08c076b9-23fe-429d-a50e-1293d82535df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.842819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.926s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.845039] env[69994]: DEBUG nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 833.847961] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.672s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.848233] env[69994]: DEBUG nova.objects.instance [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lazy-loading 'resources' on Instance uuid 289cbcc2-cd8f-4c4f-9169-a897f5527de1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 833.951623] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090336} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.951971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.953233] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e7dd46-f244-4bde-a6b2-400144a88417 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.977187] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf/8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.978030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ae9c063-c153-4ab2-b8fa-0cb6c730bbcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.997674] env[69994]: DEBUG nova.compute.manager [req-4dab25e3-93c6-4b6c-a034-9c9dfa5ca7c8 req-ad347483-1041-44be-8619-c9cf727d51ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Received event network-vif-deleted-d451c9f0-137b-44de-a79c-ec92c6f843bc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 833.997865] env[69994]: INFO nova.compute.manager [req-4dab25e3-93c6-4b6c-a034-9c9dfa5ca7c8 req-ad347483-1041-44be-8619-c9cf727d51ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Neutron deleted interface d451c9f0-137b-44de-a79c-ec92c6f843bc; detaching it from the instance and deleting it from the info cache [ 834.000466] env[69994]: DEBUG nova.network.neutron [req-4dab25e3-93c6-4b6c-a034-9c9dfa5ca7c8 req-ad347483-1041-44be-8619-c9cf727d51ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.011382] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 834.011382] env[69994]: value = "task-3241862" [ 834.011382] env[69994]: _type = "Task" [ 834.011382] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.024683] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241862, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.286024] env[69994]: INFO nova.compute.manager [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Took 45.14 seconds to build instance. [ 834.349656] env[69994]: DEBUG nova.compute.utils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 834.351436] env[69994]: DEBUG nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 834.351628] env[69994]: DEBUG nova.network.neutron [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 834.357622] env[69994]: DEBUG nova.network.neutron [-] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.445134] env[69994]: DEBUG nova.policy [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7bd23f795a945da81d522665f427f5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c29d56501a3486892ec8e14e9194bd9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 834.504909] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff0831b8-beaa-49c5-9af3-d30e37f7b328 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.532737] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1c2ed0-c9ba-4232-be43-3ab519dd442f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.561674] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241862, 'name': ReconfigVM_Task, 'duration_secs': 0.49281} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.562116] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf/8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.562888] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd56b083-7573-4b2e-9cc2-a7e726ada00e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.589190] env[69994]: DEBUG nova.compute.manager [req-4dab25e3-93c6-4b6c-a034-9c9dfa5ca7c8 req-ad347483-1041-44be-8619-c9cf727d51ea service nova] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Detach interface failed, port_id=d451c9f0-137b-44de-a79c-ec92c6f843bc, reason: Instance 1d548f54-4ffa-4299-9212-717350558ad4 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 834.589895] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 834.589895] env[69994]: value = "task-3241863" [ 834.589895] env[69994]: _type = "Task" [ 834.589895] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.600751] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241863, 'name': Rename_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.671255] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "2d812174-d2ad-4fac-8ae5-ffa51d691374" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.671559] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.671978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "2d812174-d2ad-4fac-8ae5-ffa51d691374-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.672312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.673987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.680424] env[69994]: INFO nova.compute.manager [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Terminating instance [ 834.792153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d4b73dfa-e710-46f3-8710-0d777fb0de62 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "a4544bc9-6935-4825-9b45-2054d2ced330" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.527s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.856057] env[69994]: DEBUG nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Start building block 
device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 834.866440] env[69994]: INFO nova.compute.manager [-] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Took 1.44 seconds to deallocate network for instance. [ 835.106052] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241863, 'name': Rename_Task, 'duration_secs': 0.309677} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.108404] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.109087] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ae54eda-8d5e-4aef-97f2-5e20acb7f9f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.118896] env[69994]: DEBUG nova.network.neutron [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Successfully created port: aa1e87cd-049a-4e57-bca5-4a5c7f144c90 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.123022] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 835.123022] env[69994]: value = "task-3241864" [ 835.123022] env[69994]: _type = "Task" [ 835.123022] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.139134] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.181920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba16936d-c113-48c4-8d64-c24831dd2749 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.188338] env[69994]: DEBUG nova.compute.manager [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 835.188499] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 835.191973] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b4bb7e-5eef-455b-813d-97bf09fa6991 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.198694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74082ee-5085-4cea-96de-7d59f58deff2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.209234] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.246039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d34728bf-2309-44e5-a4ca-43066d0bffa5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.250588] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1a0cc8-d9c7-42ec-b9fc-5643cd189592 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.263576] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2111fa-36a1-431b-8901-53cf689a2782 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.268067] env[69994]: DEBUG oslo_vmware.api [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 835.268067] env[69994]: value = "task-3241865" [ 835.268067] env[69994]: _type = "Task" [ 835.268067] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.285901] env[69994]: DEBUG nova.compute.provider_tree [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.295087] env[69994]: DEBUG nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 835.299168] env[69994]: DEBUG oslo_vmware.api [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241865, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.374811] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.635863] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241864, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.783530] env[69994]: DEBUG oslo_vmware.api [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241865, 'name': PowerOffVM_Task, 'duration_secs': 0.249038} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.785274] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.785274] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.786434] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6396678-6314-4341-a7e3-bec207838e5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.790585] env[69994]: DEBUG nova.scheduler.client.report [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.833857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 
tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.875321] env[69994]: DEBUG nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 835.895184] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.895184] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.895361] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Deleting the datastore file [datastore1] 2d812174-d2ad-4fac-8ae5-ffa51d691374 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.895647] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9717bf95-7134-48ad-b151-dfa31e160602 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.905697] env[69994]: DEBUG oslo_vmware.api [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for the task: (returnval){ [ 835.905697] env[69994]: value = "task-3241867" [ 835.905697] env[69994]: _type = "Task" [ 835.905697] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.925561] env[69994]: DEBUG oslo_vmware.api [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241867, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.929042] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 835.929248] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 835.931055] env[69994]: DEBUG 
nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 835.931055] env[69994]: DEBUG nova.virt.hardware [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 835.932139] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01eb319c-dc2f-4664-850f-41e6b08e39f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.943736] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c25699-7858-4c55-9c98-c2310591dffb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.066063] env[69994]: DEBUG nova.compute.manager [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Received event network-changed-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 836.066376] env[69994]: DEBUG nova.compute.manager [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Refreshing instance network info cache due to event network-changed-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 836.066752] env[69994]: DEBUG oslo_concurrency.lockutils [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] Acquiring lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.066986] env[69994]: DEBUG oslo_concurrency.lockutils [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] Acquired lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.067297] env[69994]: DEBUG nova.network.neutron [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Refreshing network info cache for port 7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.136845] env[69994]: DEBUG oslo_vmware.api [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241864, 'name': PowerOnVM_Task, 'duration_secs': 0.782126} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.136845] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.136845] env[69994]: INFO nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Took 7.85 seconds to spawn the instance on the hypervisor. 
[ 836.136845] env[69994]: DEBUG nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.138036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086964bf-32b4-45e8-bb9f-1d90409efc58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.298669] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.451s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.304216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.046s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.307732] env[69994]: DEBUG nova.objects.instance [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lazy-loading 'resources' on Instance uuid 86e514bb-8b47-4605-bd85-55c6c9874320 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 836.335876] env[69994]: INFO nova.scheduler.client.report [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Deleted allocations for instance 289cbcc2-cd8f-4c4f-9169-a897f5527de1 [ 836.420125] env[69994]: DEBUG oslo_vmware.api [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Task: {'id': task-3241867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290882} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.420513] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.420753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.421035] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.421175] env[69994]: INFO nova.compute.manager [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Took 1.23 seconds to destroy the instance on the hypervisor. [ 836.421499] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.421765] env[69994]: DEBUG nova.compute.manager [-] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.421919] env[69994]: DEBUG nova.network.neutron [-] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.672302] env[69994]: INFO nova.compute.manager [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Took 43.04 seconds to build instance. [ 836.850285] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21d3c0a7-1237-4d71-800a-5a96e5c0dd1d tempest-ServersTestFqdnHostnames-1986607463 tempest-ServersTestFqdnHostnames-1986607463-project-member] Lock "289cbcc2-cd8f-4c4f-9169-a897f5527de1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.519s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.065606] env[69994]: DEBUG nova.network.neutron [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Updated VIF entry in instance network info cache for port 7f7c4dd0-5c90-4dd1-8113-b871712bb2f7. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.066017] env[69994]: DEBUG nova.network.neutron [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Updating instance_info_cache with network_info: [{"id": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "address": "fa:16:3e:97:15:a8", "network": {"id": "c152967f-46eb-4bab-a1fd-985f8d1e41e3", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-381864411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6356297e311c4b47b689a7cda41127f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7c4dd0-5c", "ovs_interfaceid": "7f7c4dd0-5c90-4dd1-8113-b871712bb2f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.176365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-30ecb4a5-d82b-4e15-9a66-b8c7fc71e46f tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.881s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.453551] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a770d35c-5998-4018-9b3a-7d0d83f33efe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.464846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357881f6-c026-4251-acd8-21cabb040cf2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.504786] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd0f01-6620-4d03-998c-4e4b5e281f64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.515735] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbc7c3a-6af6-4a78-a22b-a2fb3320e15b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.531406] env[69994]: DEBUG nova.network.neutron [-] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.533305] env[69994]: DEBUG 
nova.compute.provider_tree [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.572260] env[69994]: DEBUG oslo_concurrency.lockutils [req-0cd7a37b-d1ad-4d44-ad96-5f362bd269d1 req-3f4a8875-d6fb-4ccb-8694-baefbe3bac6f service nova] Releasing lock "refresh_cache-a4544bc9-6935-4825-9b45-2054d2ced330" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.680010] env[69994]: DEBUG nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 837.764766] env[69994]: DEBUG nova.network.neutron [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Successfully updated port: aa1e87cd-049a-4e57-bca5-4a5c7f144c90 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.909258] env[69994]: DEBUG nova.compute.manager [req-e3502db6-d5ef-47ab-9f4f-175f96af90fc req-c474bb6d-2b59-4160-a033-a35513d7e3d9 service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Received event network-vif-plugged-aa1e87cd-049a-4e57-bca5-4a5c7f144c90 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.909631] env[69994]: DEBUG oslo_concurrency.lockutils [req-e3502db6-d5ef-47ab-9f4f-175f96af90fc req-c474bb6d-2b59-4160-a033-a35513d7e3d9 service nova] Acquiring lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.909904] env[69994]: DEBUG oslo_concurrency.lockutils [req-e3502db6-d5ef-47ab-9f4f-175f96af90fc req-c474bb6d-2b59-4160-a033-a35513d7e3d9 service nova] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.910093] env[69994]: DEBUG oslo_concurrency.lockutils [req-e3502db6-d5ef-47ab-9f4f-175f96af90fc req-c474bb6d-2b59-4160-a033-a35513d7e3d9 service nova] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.910267] env[69994]: DEBUG nova.compute.manager [req-e3502db6-d5ef-47ab-9f4f-175f96af90fc req-c474bb6d-2b59-4160-a033-a35513d7e3d9 service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] No waiting events found dispatching network-vif-plugged-aa1e87cd-049a-4e57-bca5-4a5c7f144c90 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 837.910426] env[69994]: WARNING nova.compute.manager [req-e3502db6-d5ef-47ab-9f4f-175f96af90fc req-c474bb6d-2b59-4160-a033-a35513d7e3d9 service nova] [instance: 
6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Received unexpected event network-vif-plugged-aa1e87cd-049a-4e57-bca5-4a5c7f144c90 for instance with vm_state building and task_state spawning. [ 838.035147] env[69994]: INFO nova.compute.manager [-] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Took 1.61 seconds to deallocate network for instance. [ 838.036051] env[69994]: DEBUG nova.scheduler.client.report [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.209409] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.269267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "refresh_cache-6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.269267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquired lock "refresh_cache-6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.269267] env[69994]: DEBUG nova.network.neutron [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.342462] env[69994]: DEBUG nova.compute.manager [req-2e0b751e-3e7e-4100-ba29-44211bec76ca req-c1cd759c-978d-45fa-a0a3-8475df413a27 service nova] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Received event network-vif-deleted-2027b219-02f1-4669-80e5-0d03b45b5562 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 838.547984] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.244s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.551573] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.211s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.552054] env[69994]: INFO nova.compute.claims [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.555625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.576506] env[69994]: INFO nova.scheduler.client.report [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Deleted allocations for instance 86e514bb-8b47-4605-bd85-55c6c9874320 [ 838.843402] env[69994]: DEBUG nova.network.neutron [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.087601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5a1a93c2-55d2-43eb-a5b0-66e8df52d3ff tempest-ServersNegativeTestMultiTenantJSON-1852705814 tempest-ServersNegativeTestMultiTenantJSON-1852705814-project-member] Lock "86e514bb-8b47-4605-bd85-55c6c9874320" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.537s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.222034] env[69994]: DEBUG nova.network.neutron [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Updating instance_info_cache with network_info: [{"id": "aa1e87cd-049a-4e57-bca5-4a5c7f144c90", "address": "fa:16:3e:2e:48:73", "network": {"id": "a027fb7d-a15d-407c-b6cd-c5d136279c52", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1909751472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c29d56501a3486892ec8e14e9194bd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1e87cd-04", 
"ovs_interfaceid": "aa1e87cd-049a-4e57-bca5-4a5c7f144c90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.727331] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Releasing lock "refresh_cache-6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.727331] env[69994]: DEBUG nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Instance network_info: |[{"id": "aa1e87cd-049a-4e57-bca5-4a5c7f144c90", "address": "fa:16:3e:2e:48:73", "network": {"id": "a027fb7d-a15d-407c-b6cd-c5d136279c52", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1909751472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c29d56501a3486892ec8e14e9194bd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1e87cd-04", "ovs_interfaceid": "aa1e87cd-049a-4e57-bca5-4a5c7f144c90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 839.730842] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:48:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa1e87cd-049a-4e57-bca5-4a5c7f144c90', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.740944] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Creating folder: Project (8c29d56501a3486892ec8e14e9194bd9). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.742200] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa6b22a6-1438-46ba-a79d-9d63bb7d7afd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.760255] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Created folder: Project (8c29d56501a3486892ec8e14e9194bd9) in parent group-v647729. [ 839.760609] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Creating folder: Instances. Parent ref: group-v647899. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.760876] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3644d5f-fe48-48bb-af74-66dd5a514f6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.774296] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Created folder: Instances in parent group-v647899. [ 839.774754] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.777858] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 839.778806] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99022535-12f1-4092-a797-3028f85c3a3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.812565] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.812565] env[69994]: value = "task-3241870" [ 839.812565] env[69994]: _type = "Task" [ 839.812565] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.832166] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241870, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.209940] env[69994]: DEBUG nova.compute.manager [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Received event network-changed-aa1e87cd-049a-4e57-bca5-4a5c7f144c90 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.209940] env[69994]: DEBUG nova.compute.manager [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Refreshing instance network info cache due to event network-changed-aa1e87cd-049a-4e57-bca5-4a5c7f144c90. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 840.209940] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Acquiring lock "refresh_cache-6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.209940] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Acquired lock "refresh_cache-6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.209940] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Refreshing network info cache for port aa1e87cd-049a-4e57-bca5-4a5c7f144c90 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.273239] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af05ee7b-4b81-4b81-8e92-1833433f0386 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.283230] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a6edd7-450d-4d1f-8204-51eacd2aefeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.322635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9200e7d5-a05a-44ba-b5d4-f27f08fa9a0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.334406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340010bd-cfa3-4198-bff8-9e176196ff97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.339504] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241870, 'name': CreateVM_Task, 'duration_secs': 0.517258} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.339504] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.339504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.340139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.340139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 840.340995] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d38351-431c-46c9-bae7-bfccb7d2174c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.350507] env[69994]: DEBUG nova.compute.provider_tree [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.355190] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 840.355190] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523869f5-3422-44dc-d2be-1b0c6003153d" [ 840.355190] env[69994]: _type = "Task" [ 840.355190] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.365484] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523869f5-3422-44dc-d2be-1b0c6003153d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.854613] env[69994]: DEBUG nova.scheduler.client.report [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.869475] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523869f5-3422-44dc-d2be-1b0c6003153d, 'name': SearchDatastore_Task, 'duration_secs': 0.020234} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.869830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.870093] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.870344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.870491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.870671] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.870943] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fb11503-1425-415b-8a34-cc21c4668386 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.885986] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.886222] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.887266] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7832da9-0a65-4377-a8ec-7ea62f38825d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.896176] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 840.896176] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5273e9bd-0a4e-39fd-0bfe-4379056a9ea5" [ 840.896176] env[69994]: _type = "Task" [ 840.896176] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.907607] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5273e9bd-0a4e-39fd-0bfe-4379056a9ea5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.052171] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Updated VIF entry in instance network info cache for port aa1e87cd-049a-4e57-bca5-4a5c7f144c90. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.052559] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Updating instance_info_cache with network_info: [{"id": "aa1e87cd-049a-4e57-bca5-4a5c7f144c90", "address": "fa:16:3e:2e:48:73", "network": {"id": "a027fb7d-a15d-407c-b6cd-c5d136279c52", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1909751472-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c29d56501a3486892ec8e14e9194bd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1e87cd-04", "ovs_interfaceid": "aa1e87cd-049a-4e57-bca5-4a5c7f144c90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.364662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.814s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.365348] env[69994]: DEBUG nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 841.369635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 34.964s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.414211] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5273e9bd-0a4e-39fd-0bfe-4379056a9ea5, 'name': SearchDatastore_Task, 'duration_secs': 0.020515} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.415139] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ea05ff-3976-499d-8cd2-5bd6ee3146c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.422106] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 841.422106] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b5f729-f100-74ec-4018-2e525ec06087" [ 841.422106] env[69994]: _type = "Task" [ 841.422106] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.438330] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b5f729-f100-74ec-4018-2e525ec06087, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.560412] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Releasing lock "refresh_cache-6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.560858] env[69994]: DEBUG nova.compute.manager [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Received event network-changed-58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 841.561172] env[69994]: DEBUG nova.compute.manager [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Refreshing instance network info cache due to event network-changed-58907f85-0b65-4837-9d8e-da4ed1cf1be6. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 841.561534] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Acquiring lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.561796] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Acquired lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.562096] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Refreshing network info cache for port 58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.873867] env[69994]: DEBUG nova.compute.utils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 841.882578] env[69994]: INFO nova.compute.claims [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.887331] env[69994]: DEBUG nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 841.887551] env[69994]: DEBUG nova.network.neutron [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.936261] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b5f729-f100-74ec-4018-2e525ec06087, 'name': SearchDatastore_Task, 'duration_secs': 0.019247} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.936528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.937926] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8/6d99c52e-8893-4ad7-8d8e-56bd8c9379b8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.937926] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75595f08-7d76-4bd9-83e0-8fee4aa70e8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.946537] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 841.946537] env[69994]: value = "task-3241874" [ 841.946537] env[69994]: _type = "Task" [ 841.946537] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.951121] env[69994]: DEBUG nova.policy [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41e25eb110b14afdb3e67bd4dd943e9c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8b8ab56b87c46f9b960fc3b430197d3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 841.958698] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.387895] env[69994]: DEBUG nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 842.393107] env[69994]: INFO nova.compute.resource_tracker [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating resource usage from migration 95a615f0-9414-47d5-965f-8bcf9c644849 [ 842.446881] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updated VIF entry in instance network info cache for port 58907f85-0b65-4837-9d8e-da4ed1cf1be6. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.446940] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updating instance_info_cache with network_info: [{"id": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "address": "fa:16:3e:bf:d5:a6", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58907f85-0b", "ovs_interfaceid": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.461217] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506453} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.461217] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8/6d99c52e-8893-4ad7-8d8e-56bd8c9379b8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.461346] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.461598] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc52c83c-f561-4eef-9496-16e2878a666f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.478684] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 842.478684] env[69994]: value = "task-3241875" [ 842.478684] env[69994]: _type = "Task" [ 842.478684] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.488318] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241875, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.834850] env[69994]: DEBUG nova.network.neutron [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Successfully created port: b6545d7e-9893-450b-9d3b-67d4d7affbe4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.954277] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Releasing lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.954553] env[69994]: DEBUG nova.compute.manager [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Received event network-changed-58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.954719] env[69994]: DEBUG nova.compute.manager [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Refreshing instance network info cache due to event network-changed-58907f85-0b65-4837-9d8e-da4ed1cf1be6. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 842.954954] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Acquiring lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.955302] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Acquired lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.955302] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Refreshing network info cache for port 58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.994915] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081792} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.997406] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.997406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715e294a-72d2-4140-bd39-a5a77739ddb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.027654] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8/6d99c52e-8893-4ad7-8d8e-56bd8c9379b8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.032073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e290939-a640-48bd-90a4-a283c7adb66a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.056339] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 843.056339] env[69994]: value = "task-3241876" [ 843.056339] env[69994]: _type = "Task" [ 843.056339] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.073791] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241876, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.116962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6393b9-6e00-441a-96e9-6c20d1ada568 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.132139] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce904f63-d85f-4721-ba7f-86e79c076a57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.170332] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb477a08-1d01-4371-9180-9d9aea3f4ee9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.177060] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab317a44-dc35-4853-b1dc-ce66d59cbd3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.191129] env[69994]: DEBUG nova.compute.provider_tree [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.404409] env[69994]: DEBUG nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 843.442885] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 843.443163] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.443326] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 843.443510] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.443727] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 843.443887] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 843.444110] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 843.444271] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 843.444459] 
env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 843.444644] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 843.444844] env[69994]: DEBUG nova.virt.hardware [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 843.445777] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d0b103-9179-4a09-9cec-fad846276e01 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.454351] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204b409f-2c84-4f8f-87ab-f3caa7947684 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.568145] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241876, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.695731] env[69994]: DEBUG nova.scheduler.client.report [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.772142] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updated VIF entry in instance network info cache for port 58907f85-0b65-4837-9d8e-da4ed1cf1be6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.772142] env[69994]: DEBUG nova.network.neutron [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updating instance_info_cache with network_info: [{"id": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "address": "fa:16:3e:bf:d5:a6", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58907f85-0b", "ovs_interfaceid": "58907f85-0b65-4837-9d8e-da4ed1cf1be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.070190] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241876, 'name': ReconfigVM_Task, 'duration_secs': 0.644114} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.070920] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8/6d99c52e-8893-4ad7-8d8e-56bd8c9379b8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.071621] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce24ae5b-5d2c-4d10-aadb-355c4351af89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.078467] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 844.078467] env[69994]: value = "task-3241878" [ 844.078467] env[69994]: _type = "Task" [ 844.078467] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.087714] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241878, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.203087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.833s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.203315] env[69994]: INFO nova.compute.manager [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Migrating [ 844.210275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.239s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.210518] env[69994]: DEBUG nova.objects.instance [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lazy-loading 'resources' on Instance uuid 15d17772-ac57-49a3-b261-bf49b902f658 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.273684] env[69994]: DEBUG oslo_concurrency.lockutils [req-a1591c85-fe18-41e7-8b47-a74258c44400 req-7bf93286-38bb-4257-8117-a729cd793f5c service nova] Releasing lock "refresh_cache-8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.485028] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524b7ed1-1fa8-c744-0275-6664f7283c16/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 844.485422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd931064-b46d-4e5a-965d-6b82bc7d9ec8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.493470] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524b7ed1-1fa8-c744-0275-6664f7283c16/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 844.493470] env[69994]: ERROR oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524b7ed1-1fa8-c744-0275-6664f7283c16/disk-0.vmdk due to incomplete transfer. 
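[editor's note] The recurring "Task: {'id': task-..., 'name': ...} progress is N%." and "... 'duration_secs': ... completed successfully." entries above and below come from oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The following is a minimal illustrative sketch of that polling pattern, not the driver's actual code; the get_task_info() helper and its state/progress/error attributes are assumed stand-ins for reading the vCenter Task managed object.

    import time

    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it reaches a terminal state.

        get_task_info(task_ref) is an assumed helper returning an object
        with .state ('running', 'success', 'error'), .progress (0-100)
        and .error; the real driver reads these via the property collector.
        """
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info.state == "running":
                # Corresponds to the "Task: {...} progress is N%." DEBUG lines.
                print(f"Task {task_ref} progress is {info.progress}%.")
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - start
            if info.state == "success":
                # Corresponds to "'duration_secs': ... completed successfully."
                print(f"Task {task_ref} completed successfully in {duration:.6f}s.")
                return info
            raise RuntimeError(f"Task {task_ref} failed after {duration:.6f}s: {info.error}")

[end editor's note]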
[ 844.493470] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6bf003a1-4032-4160-8104-71fc63bead60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.503118] env[69994]: DEBUG oslo_vmware.rw_handles [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524b7ed1-1fa8-c744-0275-6664f7283c16/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 844.503118] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Uploaded image 40acda84-5087-4e7b-a84d-2631d6a2f2c4 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 844.504233] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 844.504886] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d510ef87-2a87-4f00-9ad9-7e20b05231b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.513871] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 844.513871] env[69994]: value = "task-3241879" [ 844.513871] env[69994]: _type = "Task" [ 844.513871] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.520115] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241879, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.588771] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241878, 'name': Rename_Task, 'duration_secs': 0.343944} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.589109] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.589361] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a167570-a8ab-460e-9d8e-f3559ebc9500 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.595558] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 844.595558] env[69994]: value = "task-3241880" [ 844.595558] env[69994]: _type = "Task" [ 844.595558] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.604699] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241880, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.672269] env[69994]: DEBUG nova.compute.manager [req-7abc2749-a9e6-4495-a24d-f5a9d2e6c8d5 req-b9c38960-96d2-4675-ab58-51f9e16a83ec service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Received event network-vif-plugged-b6545d7e-9893-450b-9d3b-67d4d7affbe4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.672392] env[69994]: DEBUG oslo_concurrency.lockutils [req-7abc2749-a9e6-4495-a24d-f5a9d2e6c8d5 req-b9c38960-96d2-4675-ab58-51f9e16a83ec service nova] Acquiring lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.672726] env[69994]: DEBUG oslo_concurrency.lockutils [req-7abc2749-a9e6-4495-a24d-f5a9d2e6c8d5 req-b9c38960-96d2-4675-ab58-51f9e16a83ec service nova] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.672811] env[69994]: DEBUG oslo_concurrency.lockutils [req-7abc2749-a9e6-4495-a24d-f5a9d2e6c8d5 req-b9c38960-96d2-4675-ab58-51f9e16a83ec service nova] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.673022] env[69994]: DEBUG nova.compute.manager [req-7abc2749-a9e6-4495-a24d-f5a9d2e6c8d5 req-b9c38960-96d2-4675-ab58-51f9e16a83ec service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] No waiting events found dispatching network-vif-plugged-b6545d7e-9893-450b-9d3b-67d4d7affbe4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 844.673269] env[69994]: WARNING nova.compute.manager 
[req-7abc2749-a9e6-4495-a24d-f5a9d2e6c8d5 req-b9c38960-96d2-4675-ab58-51f9e16a83ec service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Received unexpected event network-vif-plugged-b6545d7e-9893-450b-9d3b-67d4d7affbe4 for instance with vm_state building and task_state spawning. [ 844.722216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.722404] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.722580] env[69994]: DEBUG nova.network.neutron [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.815685] env[69994]: DEBUG nova.network.neutron [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Successfully updated port: b6545d7e-9893-450b-9d3b-67d4d7affbe4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.913823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "e3697388-4598-4dde-8c20-43fc7665083b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.914353] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.024496] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241879, 'name': Destroy_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.106568] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241880, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.232964] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efe1d6f-419f-4226-bcdf-b2c0af87430c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.240992] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe39fcc-5f59-4f0c-b544-3d289ee84cf2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.274526] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae4a64c-5428-4121-bba7-c9ae093854a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.284994] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e064fea-8440-42e7-bf1d-0786f1a18e45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.299702] env[69994]: DEBUG nova.compute.provider_tree [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.317133] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "refresh_cache-ffe5f2c6-69e7-4bdb-80d1-b421b695e790" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.317448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired lock "refresh_cache-ffe5f2c6-69e7-4bdb-80d1-b421b695e790" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.317448] env[69994]: DEBUG nova.network.neutron [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.525088] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241879, 'name': Destroy_Task, 'duration_secs': 0.520654} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.528540] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Destroyed the VM [ 845.528540] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 845.528540] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-73211161-b5c4-4224-a9b9-0865abf7d2eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.534937] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 845.534937] env[69994]: value = "task-3241882" [ 845.534937] env[69994]: _type = "Task" [ 845.534937] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.546804] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241882, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.557834] env[69994]: DEBUG nova.network.neutron [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance_info_cache with network_info: [{"id": "03a2cce0-4737-45b4-8482-4eabd0e63386", "address": "fa:16:3e:fe:86:b3", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03a2cce0-47", "ovs_interfaceid": "03a2cce0-4737-45b4-8482-4eabd0e63386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.609752] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 
tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241880, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.803575] env[69994]: DEBUG nova.scheduler.client.report [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.853816] env[69994]: DEBUG nova.network.neutron [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.049096] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241882, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.060439] env[69994]: DEBUG nova.network.neutron [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Updating instance_info_cache with network_info: [{"id": "b6545d7e-9893-450b-9d3b-67d4d7affbe4", "address": "fa:16:3e:02:fb:94", "network": {"id": "e6b2a322-ac01-400c-a8ec-68a371b2061c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-141203958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8b8ab56b87c46f9b960fc3b430197d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6545d7e-98", "ovs_interfaceid": "b6545d7e-9893-450b-9d3b-67d4d7affbe4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.062632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock 
"refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.111290] env[69994]: DEBUG oslo_vmware.api [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241880, 'name': PowerOnVM_Task, 'duration_secs': 1.263544} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.111569] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.111770] env[69994]: INFO nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Took 10.24 seconds to spawn the instance on the hypervisor. [ 846.111950] env[69994]: DEBUG nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 846.113077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61af7317-4e6f-498e-a1e6-17f60c145a5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.309096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.311568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.664s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.313090] env[69994]: INFO nova.compute.claims [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.331803] env[69994]: INFO nova.scheduler.client.report [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Deleted allocations for instance 15d17772-ac57-49a3-b261-bf49b902f658 [ 846.546481] env[69994]: DEBUG oslo_vmware.api [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241882, 'name': 
RemoveSnapshot_Task, 'duration_secs': 0.728492} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.546832] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 846.547097] env[69994]: INFO nova.compute.manager [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Took 21.30 seconds to snapshot the instance on the hypervisor. [ 846.564473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Releasing lock "refresh_cache-ffe5f2c6-69e7-4bdb-80d1-b421b695e790" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.564841] env[69994]: DEBUG nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Instance network_info: |[{"id": "b6545d7e-9893-450b-9d3b-67d4d7affbe4", "address": "fa:16:3e:02:fb:94", "network": {"id": "e6b2a322-ac01-400c-a8ec-68a371b2061c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-141203958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8b8ab56b87c46f9b960fc3b430197d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6545d7e-98", "ovs_interfaceid": "b6545d7e-9893-450b-9d3b-67d4d7affbe4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 846.568766] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:fb:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6545d7e-9893-450b-9d3b-67d4d7affbe4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.576563] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 846.577083] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 846.577316] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3c5820f-1fa7-44e4-8104-8d661b3156d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.598513] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.598513] env[69994]: value = "task-3241883" [ 846.598513] env[69994]: _type = "Task" [ 846.598513] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.607090] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241883, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.631293] env[69994]: INFO nova.compute.manager [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Took 50.42 seconds to build instance. [ 846.769898] env[69994]: DEBUG nova.compute.manager [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Received event network-changed-b6545d7e-9893-450b-9d3b-67d4d7affbe4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 846.770122] env[69994]: DEBUG nova.compute.manager [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Refreshing instance network info cache due to event network-changed-b6545d7e-9893-450b-9d3b-67d4d7affbe4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 846.770344] env[69994]: DEBUG oslo_concurrency.lockutils [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] Acquiring lock "refresh_cache-ffe5f2c6-69e7-4bdb-80d1-b421b695e790" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.770486] env[69994]: DEBUG oslo_concurrency.lockutils [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] Acquired lock "refresh_cache-ffe5f2c6-69e7-4bdb-80d1-b421b695e790" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.770642] env[69994]: DEBUG nova.network.neutron [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Refreshing network info cache for port b6545d7e-9893-450b-9d3b-67d4d7affbe4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 846.842466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7c58e575-c144-42ae-b6e3-4c704cf66ca1 tempest-AttachInterfacesUnderV243Test-563369127 tempest-AttachInterfacesUnderV243Test-563369127-project-member] Lock "15d17772-ac57-49a3-b261-bf49b902f658" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.536s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.110978] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241883, 'name': CreateVM_Task, 'duration_secs': 0.406299} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.113498] env[69994]: DEBUG nova.compute.manager [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Found 3 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 847.113676] env[69994]: DEBUG nova.compute.manager [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Rotating out 1 backups {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 847.113834] env[69994]: DEBUG nova.compute.manager [None req-28c5cb67-edc8-4544-9bf5-76948bcad9cc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleting image 1cda4692-a9f9-4b00-b35d-3419adcd1881 {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 847.115544] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.116610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
847.116788] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.117178] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 847.117542] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc012447-1fc9-4303-9f85-7c7d7238be42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.123854] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 847.123854] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d7d525-0461-f709-e0bc-e27b2cfcebc5" [ 847.123854] env[69994]: _type = "Task" [ 847.123854] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.134458] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75c7e5c8-1a9c-4f21-883e-719edd22facd tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.898s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.134651] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d7d525-0461-f709-e0bc-e27b2cfcebc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.587104] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49998188-7cee-4697-bc2c-75b24c175c61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.610887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance 'f07750f5-3f1d-4d97-98dc-285ed357cc7e' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 847.637255] env[69994]: DEBUG nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.639859] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d7d525-0461-f709-e0bc-e27b2cfcebc5, 'name': SearchDatastore_Task, 'duration_secs': 0.028374} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.646330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.646452] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.646678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.646843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.647082] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.647784] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33aa2a25-2c76-421b-aacf-afdb741a94de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.661899] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.662180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 847.663316] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af8d8e4c-e455-43bf-a8b3-575ec2f52b79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.671012] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 847.671012] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b49d0f-2c83-556d-6b22-b3f299bac155" [ 847.671012] env[69994]: _type = "Task" [ 847.671012] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.685851] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b49d0f-2c83-556d-6b22-b3f299bac155, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.731756] env[69994]: DEBUG nova.network.neutron [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Updated VIF entry in instance network info cache for port b6545d7e-9893-450b-9d3b-67d4d7affbe4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.732439] env[69994]: DEBUG nova.network.neutron [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Updating instance_info_cache with network_info: [{"id": "b6545d7e-9893-450b-9d3b-67d4d7affbe4", "address": "fa:16:3e:02:fb:94", "network": {"id": "e6b2a322-ac01-400c-a8ec-68a371b2061c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-141203958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8b8ab56b87c46f9b960fc3b430197d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6545d7e-98", "ovs_interfaceid": "b6545d7e-9893-450b-9d3b-67d4d7affbe4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.886935] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113e8796-df65-494c-9e1c-bc65339b0c99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.898462] env[69994]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882db254-92b2-4841-8e87-3f148e28a6da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.936277] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ac0ef4-fe6e-418d-b7cb-b6aebf6ed88c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.944931] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188fdb85-991e-4031-b6d2-5acce66479cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.960152] env[69994]: DEBUG nova.compute.provider_tree [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.122491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.122491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.122491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.122491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.122491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.125479] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.126234] env[69994]: INFO nova.compute.manager [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Terminating instance [ 848.127631] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1967343-8d75-4c77-9570-6d82996fd7f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.137722] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 848.137722] env[69994]: value = "task-3241885" [ 848.137722] env[69994]: _type = "Task" [ 848.137722] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.154028] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241885, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.160111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.182853] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b49d0f-2c83-556d-6b22-b3f299bac155, 'name': SearchDatastore_Task, 'duration_secs': 0.01568} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.183676] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0e1bee3-0634-406e-b2d7-4e49581107e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.190505] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 848.190505] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b8be8f-337d-bcc8-030a-3650b666f81f" [ 848.190505] env[69994]: _type = "Task" [ 848.190505] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.201948] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b8be8f-337d-bcc8-030a-3650b666f81f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.238989] env[69994]: DEBUG oslo_concurrency.lockutils [req-199632d7-dc6f-4431-abd5-b451b478636e req-4a0178ca-ce90-4a63-ac22-7908e09ff7db service nova] Releasing lock "refresh_cache-ffe5f2c6-69e7-4bdb-80d1-b421b695e790" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.463742] env[69994]: DEBUG nova.scheduler.client.report [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.632697] env[69994]: DEBUG nova.compute.manager [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 848.633091] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.633892] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcfed96-8334-4b10-9b6e-7ec6aed64019 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.644545] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.645438] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ff58675-1c24-4000-b7c5-c0283086ff17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.651055] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241885, 'name': PowerOffVM_Task, 'duration_secs': 0.180619} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.652363] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 848.652636] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance 'f07750f5-3f1d-4d97-98dc-285ed357cc7e' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 848.656514] env[69994]: DEBUG oslo_vmware.api [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 848.656514] env[69994]: value = "task-3241886" [ 848.656514] env[69994]: _type = "Task" [ 848.656514] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.669065] env[69994]: DEBUG oslo_vmware.api [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241886, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.701866] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b8be8f-337d-bcc8-030a-3650b666f81f, 'name': SearchDatastore_Task, 'duration_secs': 0.01438} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.703032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.703967] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ffe5f2c6-69e7-4bdb-80d1-b421b695e790/ffe5f2c6-69e7-4bdb-80d1-b421b695e790.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 848.704582] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac1978cd-d2f2-49fe-a08a-87f51ebc1b37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.714524] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 848.714524] env[69994]: value = "task-3241887" [ 848.714524] env[69994]: _type = "Task" [ 848.714524] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.723658] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.974541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.660s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.974541] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 848.976622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.721s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.976622] env[69994]: DEBUG nova.objects.instance [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lazy-loading 'resources' on Instance uuid e4013007-fd79-4d70-a9d1-70a4c621c0ea {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.160017] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 849.160378] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.160559] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 849.160746] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.160895] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 849.161118] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 849.161410] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 849.162285] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 849.162502] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 849.162684] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 849.162867] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 849.168174] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c8da2d3-8064-465f-b4fd-8c30146739bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.188882] env[69994]: DEBUG oslo_vmware.api [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241886, 'name': PowerOffVM_Task, 'duration_secs': 0.188234} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.190276] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.190461] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.190781] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 849.190781] env[69994]: value = "task-3241888" [ 849.190781] env[69994]: _type = "Task" [ 849.190781] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.191082] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df607c28-5b85-4359-bac7-3b3f4efb4d62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.202785] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241888, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.224838] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241887, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48257} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.225177] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ffe5f2c6-69e7-4bdb-80d1-b421b695e790/ffe5f2c6-69e7-4bdb-80d1-b421b695e790.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.225398] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.225710] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-762d439a-c598-4b59-b411-a65ec1d30011 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.233401] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 849.233401] env[69994]: value = "task-3241890" [ 849.233401] env[69994]: _type = "Task" [ 849.233401] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.243707] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241890, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.272694] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.272946] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.273179] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Deleting the datastore file [datastore2] 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.273453] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81a67e82-5500-4492-8585-92f795f6b635 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.281653] env[69994]: DEBUG oslo_vmware.api [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for the task: (returnval){ [ 849.281653] env[69994]: value = "task-3241891" [ 849.281653] env[69994]: _type = "Task" [ 849.281653] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.291456] env[69994]: DEBUG oslo_vmware.api [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241891, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.479833] env[69994]: DEBUG nova.compute.utils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.486700] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 849.486700] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 849.580672] env[69994]: DEBUG nova.policy [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7048717682204eb59697716a973c356b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91fa55bf90ff43a8b255a1e2fa2c22be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 849.705360] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241888, 'name': ReconfigVM_Task, 'duration_secs': 0.209933} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.705656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance 'f07750f5-3f1d-4d97-98dc-285ed357cc7e' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 849.747143] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082715} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.747570] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.748410] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a31d4b-e417-4e5b-bb63-f7d39649a49e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.779289] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] ffe5f2c6-69e7-4bdb-80d1-b421b695e790/ffe5f2c6-69e7-4bdb-80d1-b421b695e790.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.782970] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc241283-d53d-4a9d-98b6-91a0b0d44f7e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.811221] env[69994]: DEBUG oslo_vmware.api [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Task: {'id': task-3241891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156119} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.812691] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.812888] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.813113] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.813342] env[69994]: INFO nova.compute.manager [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 849.813632] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.813908] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 849.813908] env[69994]: value = "task-3241893" [ 849.813908] env[69994]: _type = "Task" [ 849.813908] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.816548] env[69994]: DEBUG nova.compute.manager [-] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 849.816629] env[69994]: DEBUG nova.network.neutron [-] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 849.827686] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241893, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.846189] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "45a8dced-6c49-441c-92e2-ee323ed8753c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.847058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.847058] env[69994]: DEBUG nova.compute.manager [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 849.850456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc03ea7f-a3da-408d-a345-74b10c028ad0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.857780] env[69994]: DEBUG nova.compute.manager [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Stopping instance; current vm_state: active, 
current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 849.858703] env[69994]: DEBUG nova.objects.instance [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'flavor' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.985369] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 850.112664] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add6b672-3cdc-4e4a-8b31-3dff40d67fb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.129320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8270f9ec-552e-4a1c-8cd3-998001827d96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.167172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b1967d-d740-4ee3-ad8f-639e889707c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.172186] env[69994]: DEBUG nova.compute.manager [req-eedd54c6-424c-4bca-a457-f6d2eea7fe76 req-9db673db-b1d2-4437-8022-cceb281c1f41 service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Received event network-vif-deleted-aa1e87cd-049a-4e57-bca5-4a5c7f144c90 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.172186] env[69994]: INFO nova.compute.manager [req-eedd54c6-424c-4bca-a457-f6d2eea7fe76 req-9db673db-b1d2-4437-8022-cceb281c1f41 service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Neutron deleted interface aa1e87cd-049a-4e57-bca5-4a5c7f144c90; detaching it from the instance and deleting it from the info cache [ 850.172186] env[69994]: DEBUG nova.network.neutron [req-eedd54c6-424c-4bca-a457-f6d2eea7fe76 req-9db673db-b1d2-4437-8022-cceb281c1f41 service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.180805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9de52ff-e229-43ad-9a6c-d8e7e815a950 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.199545] env[69994]: DEBUG nova.compute.provider_tree [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.217974] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 
tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:40:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='60367b47-c076-4b83-be63-6ff8f43248be',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-590586289',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 850.218361] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.218534] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.218790] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.219032] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.219226] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 850.219451] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 850.219613] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 850.219866] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 850.220091] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 
tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 850.220286] env[69994]: DEBUG nova.virt.hardware [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 850.232415] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 850.232415] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc43570d-ecf2-4bc7-8bf5-d72a11152d3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.254826] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 850.254826] env[69994]: value = "task-3241894" [ 850.254826] env[69994]: _type = "Task" [ 850.254826] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.308601] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241894, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.328980] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241893, 'name': ReconfigVM_Task, 'duration_secs': 0.421366} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.329692] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Reconfigured VM instance instance-00000039 to attach disk [datastore1] ffe5f2c6-69e7-4bdb-80d1-b421b695e790/ffe5f2c6-69e7-4bdb-80d1-b421b695e790.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.330394] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2945ed84-ed4d-46b2-92e0-7283f1db3d29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.338208] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 850.338208] env[69994]: value = "task-3241895" [ 850.338208] env[69994]: _type = "Task" [ 850.338208] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.346921] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241895, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.367859] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Successfully created port: 8113f947-c14b-4c98-9e15-99bdde32f01c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.638827] env[69994]: DEBUG nova.network.neutron [-] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.674480] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28a9bde5-986b-4ee9-8e88-8a5cf785466a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.688318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1140ce-bb3b-4276-8a9a-c88b2950ea2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.702547] env[69994]: DEBUG nova.scheduler.client.report [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.731930] env[69994]: DEBUG nova.compute.manager [req-eedd54c6-424c-4bca-a457-f6d2eea7fe76 req-9db673db-b1d2-4437-8022-cceb281c1f41 service nova] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Detach interface failed, port_id=aa1e87cd-049a-4e57-bca5-4a5c7f144c90, reason: Instance 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 850.765346] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241894, 'name': ReconfigVM_Task, 'duration_secs': 0.210495} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.765628] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 850.766546] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715cd159-3e29-4291-a628-c27a043e9a02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.791959] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.792297] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-979e432a-1665-40f9-8c29-b46506d5db2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.817996] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 850.817996] env[69994]: value = "task-3241896" [ 850.817996] env[69994]: _type = "Task" [ 850.817996] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.826462] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241896, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.848269] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241895, 'name': Rename_Task, 'duration_secs': 0.169071} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.848558] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.848826] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb2ddcfa-dfdf-47c3-a490-808c96978968 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.856142] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 850.856142] env[69994]: value = "task-3241897" [ 850.856142] env[69994]: _type = "Task" [ 850.856142] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.866361] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.866603] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0adce48-f57e-42e6-bf0d-91052207f43c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.871229] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241897, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.878033] env[69994]: DEBUG oslo_vmware.api [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 850.878033] env[69994]: value = "task-3241898" [ 850.878033] env[69994]: _type = "Task" [ 850.878033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.888199] env[69994]: DEBUG oslo_vmware.api [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241898, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.999112] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 851.031324] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 851.031617] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.031783] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 851.031990] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.032164] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 851.032345] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 851.032559] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 851.032723] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 851.032922] env[69994]: DEBUG nova.virt.hardware [None 
req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 851.033129] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 851.033333] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 851.034274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e46cf0-9b4d-462a-aa6f-e2f2be60ddac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.044012] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f404f6-012d-4fa0-9b6f-b1d3c2c89a18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.141777] env[69994]: INFO nova.compute.manager [-] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Took 1.32 seconds to deallocate network for instance. [ 851.207982] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.232s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.211044] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.746s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.211947] env[69994]: INFO nova.compute.claims [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.328356] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241896, 'name': ReconfigVM_Task, 'duration_secs': 0.473102} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.328640] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfigured VM instance instance-0000002f to attach disk [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.328947] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance 'f07750f5-3f1d-4d97-98dc-285ed357cc7e' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 851.368242] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241897, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.371264] env[69994]: INFO nova.scheduler.client.report [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Deleted allocations for instance e4013007-fd79-4d70-a9d1-70a4c621c0ea [ 851.391670] env[69994]: DEBUG oslo_vmware.api [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3241898, 'name': PowerOffVM_Task, 'duration_secs': 0.241648} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.392093] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.392725] env[69994]: DEBUG nova.compute.manager [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.393128] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b952c5bc-15da-4d07-a3a1-7bdf683ffb0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.649691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.835745] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b1543c-eaec-4147-bd0a-943bbffbe67b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.857080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6e3e30-5f63-4a06-9063-1ae107696814 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.868645] env[69994]: DEBUG oslo_vmware.api [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241897, 'name': PowerOnVM_Task, 'duration_secs': 0.573918} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.882917] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.882917] env[69994]: INFO nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Took 8.48 seconds to spawn the instance on the hypervisor. 
[ 851.882917] env[69994]: DEBUG nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.885013] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance 'f07750f5-3f1d-4d97-98dc-285ed357cc7e' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 851.888345] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a8da9ed0-3652-42fb-9a0e-e79c4f574785 tempest-ListImageFiltersTestJSON-438347016 tempest-ListImageFiltersTestJSON-438347016-project-member] Lock "e4013007-fd79-4d70-a9d1-70a4c621c0ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.144s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.893426] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb90288-05cb-4a31-b6e8-6906d96b016f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.907685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ef56bff-5135-4dc6-8782-59bba4c84ffc tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.061s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.374224] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Successfully updated port: 8113f947-c14b-4c98-9e15-99bdde32f01c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.421425] env[69994]: INFO nova.compute.manager [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Took 49.10 seconds to build instance. 
[ 852.521836] env[69994]: DEBUG nova.compute.manager [req-fb60b01e-483e-452b-aa01-ea9fd790b9cc req-973c40cd-90b9-4fd7-a325-02b4a4772047 service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Received event network-vif-plugged-8113f947-c14b-4c98-9e15-99bdde32f01c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.521836] env[69994]: DEBUG oslo_concurrency.lockutils [req-fb60b01e-483e-452b-aa01-ea9fd790b9cc req-973c40cd-90b9-4fd7-a325-02b4a4772047 service nova] Acquiring lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.521836] env[69994]: DEBUG oslo_concurrency.lockutils [req-fb60b01e-483e-452b-aa01-ea9fd790b9cc req-973c40cd-90b9-4fd7-a325-02b4a4772047 service nova] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.521836] env[69994]: DEBUG oslo_concurrency.lockutils [req-fb60b01e-483e-452b-aa01-ea9fd790b9cc req-973c40cd-90b9-4fd7-a325-02b4a4772047 service nova] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.521836] env[69994]: DEBUG nova.compute.manager [req-fb60b01e-483e-452b-aa01-ea9fd790b9cc req-973c40cd-90b9-4fd7-a325-02b4a4772047 service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] No waiting events found dispatching network-vif-plugged-8113f947-c14b-4c98-9e15-99bdde32f01c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 852.521836] env[69994]: WARNING nova.compute.manager [req-fb60b01e-483e-452b-aa01-ea9fd790b9cc req-973c40cd-90b9-4fd7-a325-02b4a4772047 service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Received unexpected event network-vif-plugged-8113f947-c14b-4c98-9e15-99bdde32f01c for instance with vm_state building and task_state spawning. 
[ 852.544896] env[69994]: DEBUG nova.network.neutron [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Port 03a2cce0-4737-45b4-8482-4eabd0e63386 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 852.879335] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "refresh_cache-cd5a47f2-147b-4e50-980d-8e1c40bc7594" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.879641] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "refresh_cache-cd5a47f2-147b-4e50-980d-8e1c40bc7594" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.879764] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.897567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7217e158-c2cd-4c58-a94e-cbc6937290bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.912317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef02d727-ea40-4d8a-8e5a-67eb812a1c6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.943918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd74eb34-6793-4be9-af93-d286221526d4 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.658s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.945301] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275bedf3-0256-4b43-8d80-c8c465eff341 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.954341] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f8565f-880f-4905-b48c-5a9b9f4cc1af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.970492] env[69994]: DEBUG nova.compute.provider_tree [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.436769] env[69994]: DEBUG nova.network.neutron 
[None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.477342] env[69994]: DEBUG nova.scheduler.client.report [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 853.567495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.567750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.567923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.704832] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Updating instance_info_cache with network_info: [{"id": "8113f947-c14b-4c98-9e15-99bdde32f01c", "address": "fa:16:3e:1d:df:36", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8113f947-c1", "ovs_interfaceid": "8113f947-c14b-4c98-9e15-99bdde32f01c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.832856] env[69994]: DEBUG nova.compute.manager [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Stashing vm_state: stopped {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 853.983630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.773s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.984224] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 853.991019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.408s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.991019] env[69994]: DEBUG nova.objects.instance [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lazy-loading 'resources' on Instance uuid d4f87534-813e-4ff6-8b1f-ee23cb0b8e80 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.210601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "refresh_cache-cd5a47f2-147b-4e50-980d-8e1c40bc7594" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.210988] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Instance network_info: |[{"id": "8113f947-c14b-4c98-9e15-99bdde32f01c", "address": "fa:16:3e:1d:df:36", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8113f947-c1", "ovs_interfaceid": "8113f947-c14b-4c98-9e15-99bdde32f01c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 854.211505] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:df:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8113f947-c14b-4c98-9e15-99bdde32f01c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.220586] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.220828] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.221467] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a4cf28c-9c7c-4762-97cf-1c837f6acc92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.244151] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.244151] env[69994]: value = "task-3241901" [ 854.244151] env[69994]: _type = "Task" [ 854.244151] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.253155] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241901, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.361164] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.494784] env[69994]: DEBUG nova.compute.utils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 854.497583] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 854.497790] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 854.594804] env[69994]: DEBUG nova.policy [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7048717682204eb59697716a973c356b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91fa55bf90ff43a8b255a1e2fa2c22be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 854.662605] env[69994]: DEBUG nova.compute.manager [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Received event network-changed-8113f947-c14b-4c98-9e15-99bdde32f01c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 854.662605] env[69994]: DEBUG nova.compute.manager [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Refreshing instance network info cache due to event network-changed-8113f947-c14b-4c98-9e15-99bdde32f01c. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 854.662605] env[69994]: DEBUG oslo_concurrency.lockutils [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] Acquiring lock "refresh_cache-cd5a47f2-147b-4e50-980d-8e1c40bc7594" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.662605] env[69994]: DEBUG oslo_concurrency.lockutils [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] Acquired lock "refresh_cache-cd5a47f2-147b-4e50-980d-8e1c40bc7594" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.662605] env[69994]: DEBUG nova.network.neutron [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Refreshing network info cache for port 8113f947-c14b-4c98-9e15-99bdde32f01c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.685311] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.685486] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.685713] env[69994]: DEBUG nova.network.neutron [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.761341] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241901, 'name': CreateVM_Task, 'duration_secs': 0.393888} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.761341] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.761341] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.761341] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.761341] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 854.761341] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df513609-cbd3-40bb-8dd7-82d742ad2626 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.774665] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 854.774665] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520bb07f-ff97-a67f-dacd-a64e694edab9" [ 854.774665] env[69994]: _type = "Task" [ 854.774665] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.786065] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520bb07f-ff97-a67f-dacd-a64e694edab9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.005677] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 855.030017] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Successfully created port: 77897887-9eb8-476e-82c5-c0ab7f57adad {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.056010] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fb75ae-cde8-483f-9271-5cf91062a077 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.066235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1954724-ced7-4e19-8864-b5ef0a280e0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.099805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa5605e-8d9c-48d1-87d0-2d45d9cd1060 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.109137] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1157cdd3-89f8-492e-9b02-838f1d92f5e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.129046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.129354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.130662] env[69994]: DEBUG nova.compute.provider_tree [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.237049] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.237319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 
tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.237543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.237735] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.237898] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.240182] env[69994]: INFO nova.compute.manager [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Terminating instance [ 855.285999] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520bb07f-ff97-a67f-dacd-a64e694edab9, 'name': SearchDatastore_Task, 'duration_secs': 0.046285} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.288988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.289359] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.289556] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.289734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.289971] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.290566] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f67ff71c-d02b-4e0e-9080-4d9926c7fe93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.300839] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.301138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.301891] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dbe2912-c4cc-4804-84f3-5a2a1330e37b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.308486] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 855.308486] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52876c8a-1501-7ec9-d274-7e36d9149e3b" [ 855.308486] env[69994]: _type = "Task" [ 855.308486] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.316601] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52876c8a-1501-7ec9-d274-7e36d9149e3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.604469] env[69994]: DEBUG nova.network.neutron [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance_info_cache with network_info: [{"id": "03a2cce0-4737-45b4-8482-4eabd0e63386", "address": "fa:16:3e:fe:86:b3", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03a2cce0-47", "ovs_interfaceid": "03a2cce0-4737-45b4-8482-4eabd0e63386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.613346] env[69994]: DEBUG nova.network.neutron [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Updated VIF entry in instance network info cache for port 8113f947-c14b-4c98-9e15-99bdde32f01c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.613685] env[69994]: DEBUG nova.network.neutron [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Updating instance_info_cache with network_info: [{"id": "8113f947-c14b-4c98-9e15-99bdde32f01c", "address": "fa:16:3e:1d:df:36", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8113f947-c1", "ovs_interfaceid": "8113f947-c14b-4c98-9e15-99bdde32f01c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.632304] env[69994]: DEBUG nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 855.635651] env[69994]: DEBUG nova.scheduler.client.report [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.744479] env[69994]: DEBUG nova.compute.manager [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 855.744723] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.745663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3f30ca-4175-4350-aed8-08896b06b70a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.755049] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.755311] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e6a2b14-e2f6-4926-86f9-9066997df39b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.764029] env[69994]: DEBUG oslo_vmware.api [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 855.764029] env[69994]: value = "task-3241902" [ 855.764029] env[69994]: _type = "Task" [ 855.764029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.775282] env[69994]: DEBUG oslo_vmware.api [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.824534] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52876c8a-1501-7ec9-d274-7e36d9149e3b, 'name': SearchDatastore_Task, 'duration_secs': 0.010745} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.825686] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06b80856-3f36-4a9e-b81a-fa6e1756a9d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.835930] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 855.835930] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52674c76-71d8-e60f-9100-b29b7c7f2c3e" [ 855.835930] env[69994]: _type = "Task" [ 855.835930] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.851329] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52674c76-71d8-e60f-9100-b29b7c7f2c3e, 'name': SearchDatastore_Task, 'duration_secs': 0.011327} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.851329] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.851329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] cd5a47f2-147b-4e50-980d-8e1c40bc7594/cd5a47f2-147b-4e50-980d-8e1c40bc7594.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 855.851329] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cef6311-ba05-4fdf-9c57-72a12b489fdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.860863] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 855.860863] env[69994]: value = "task-3241904" [ 855.860863] env[69994]: _type = "Task" [ 855.860863] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.873823] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.019913] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 856.057408] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 856.057664] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.057824] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 856.058016] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 856.058169] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 856.058314] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 856.058527] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 856.058685] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 856.059244] env[69994]: DEBUG nova.virt.hardware [None 
req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 856.059244] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 856.059244] env[69994]: DEBUG nova.virt.hardware [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 856.060103] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001685f8-86ae-4323-a567-712b3370ad70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.068929] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a24a58-5e42-492f-aa64-5f4c91246080 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.107779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.117052] env[69994]: DEBUG oslo_concurrency.lockutils [req-ada5a668-a110-493c-9f4f-b825be12eb0b req-45e48b3d-2385-4774-afc2-c7dd1acf824c service nova] Releasing lock "refresh_cache-cd5a47f2-147b-4e50-980d-8e1c40bc7594" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.142839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.150850] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.501s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.150850] env[69994]: DEBUG nova.objects.instance [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lazy-loading 'resources' on Instance uuid c47c26c8-3f7f-436b-95aa-0bd08d41e62b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.174936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e 
tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.180976] env[69994]: INFO nova.scheduler.client.report [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleted allocations for instance d4f87534-813e-4ff6-8b1f-ee23cb0b8e80 [ 856.274704] env[69994]: DEBUG oslo_vmware.api [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241902, 'name': PowerOffVM_Task, 'duration_secs': 0.218858} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.275265] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 856.275652] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 856.276058] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e37e117-1e33-4997-899b-ee077cb65306 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.352644] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.352644] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.353574] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 856.353574] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Deleting contents of the VM from datastore datastore1 
{{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 856.353574] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Deleting the datastore file [datastore1] ffe5f2c6-69e7-4bdb-80d1-b421b695e790 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 856.353982] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad90b33e-1875-4e3c-afd7-de8caa7fcce3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.361934] env[69994]: DEBUG oslo_vmware.api [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 856.361934] env[69994]: value = "task-3241906" [ 856.361934] env[69994]: _type = "Task" [ 856.361934] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.379642] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241904, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506086} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.385207] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] cd5a47f2-147b-4e50-980d-8e1c40bc7594/cd5a47f2-147b-4e50-980d-8e1c40bc7594.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 856.385876] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 856.386078] env[69994]: DEBUG oslo_vmware.api [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.388222] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c83ebebc-45bd-4ecc-918c-ce2308a6606b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.394922] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 856.394922] env[69994]: value = "task-3241907" [ 856.394922] env[69994]: _type = "Task" [ 856.394922] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.405100] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241907, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.635904] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2b2d36-e36d-4a4d-b519-1f9a482615d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.664054] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddad1a60-e6d7-48d1-b098-1c7703af5803 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.671631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance 'f07750f5-3f1d-4d97-98dc-285ed357cc7e' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 856.692591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c183e7a-280d-4525-97b0-279e2a56dafb tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "d4f87534-813e-4ff6-8b1f-ee23cb0b8e80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.782s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.855295] env[69994]: INFO nova.compute.manager [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Detaching volume 7e4a5305-889c-4f6f-ae22-6686decd4bae [ 856.879326] env[69994]: DEBUG oslo_vmware.api [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3241906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183123} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.882535] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.882535] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.882535] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.882535] env[69994]: INFO nova.compute.manager [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Took 1.14 seconds to destroy the instance on the hypervisor. [ 856.883845] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.883845] env[69994]: DEBUG nova.compute.manager [-] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 856.883845] env[69994]: DEBUG nova.network.neutron [-] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.902125] env[69994]: INFO nova.virt.block_device [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Attempting to driver detach volume 7e4a5305-889c-4f6f-ae22-6686decd4bae from mountpoint /dev/sdb [ 856.902340] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 856.904040] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647877', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'name': 'volume-7e4a5305-889c-4f6f-ae22-6686decd4bae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f66a148-86fe-4ddc-b8ed-6e6a306bbc24', 'attached_at': '', 'detached_at': '', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'serial': '7e4a5305-889c-4f6f-ae22-6686decd4bae'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 856.904040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bb537e-80db-46f3-81b5-3b3437ca92cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.916628] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072758} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.933927] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.937526] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cef7cc-912b-4bcd-9371-d8d918be514b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.942328] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366c603d-b4df-43f6-8c73-b11c52c1d08e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.964340] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] cd5a47f2-147b-4e50-980d-8e1c40bc7594/cd5a47f2-147b-4e50-980d-8e1c40bc7594.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.970543] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-866b13c1-8c8c-4431-bf16-34382075c886 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.986073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 
tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.986073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.986073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.986481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.986481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.990248] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fab7ea-b8b8-4a1e-9f82-5de0c12962d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.991784] env[69994]: INFO nova.compute.manager [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Terminating instance [ 857.020061] env[69994]: DEBUG nova.compute.manager [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 857.020308] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.021258] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a5bf56-5250-49a9-8b38-c95c49101ce0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.024267] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 857.024267] env[69994]: value = "task-3241908" [ 857.024267] env[69994]: _type = "Task" [ 857.024267] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.024986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76297e6c-7774-48d7-8da8-a3e7a1c41a34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.043596] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] The volume has not been displaced from its original location: [datastore2] volume-7e4a5305-889c-4f6f-ae22-6686decd4bae/volume-7e4a5305-889c-4f6f-ae22-6686decd4bae.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 857.048375] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Reconfiguring VM instance instance-00000019 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 857.054304] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e74300d8-7daa-4a5a-82f7-a4d9af6ad29a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.066866] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.071100] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a092b078-0754-4b0f-8574-ffd610fe74b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.072776] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241908, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.082184] env[69994]: DEBUG oslo_vmware.api [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 857.082184] env[69994]: value = "task-3241909" [ 857.082184] env[69994]: _type = "Task" [ 857.082184] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.083027] env[69994]: DEBUG oslo_vmware.api [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 857.083027] env[69994]: value = "task-3241910" [ 857.083027] env[69994]: _type = "Task" [ 857.083027] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.101916] env[69994]: DEBUG oslo_vmware.api [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241910, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.101916] env[69994]: DEBUG oslo_vmware.api [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241909, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.179388] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.182396] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f442a9a1-92d9-4503-99b0-eb273b2fc9b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.191787] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 857.191787] env[69994]: value = "task-3241911" [ 857.191787] env[69994]: _type = "Task" [ 857.191787] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.203374] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241911, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.312932] env[69994]: DEBUG nova.compute.manager [req-96709e67-6583-479a-8151-b903dc1f651a req-70054709-5a2d-44df-9691-1f658566d22c service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Received event network-vif-plugged-77897887-9eb8-476e-82c5-c0ab7f57adad {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 857.313172] env[69994]: DEBUG oslo_concurrency.lockutils [req-96709e67-6583-479a-8151-b903dc1f651a req-70054709-5a2d-44df-9691-1f658566d22c service nova] Acquiring lock "14b28a21-1b71-4d7e-bd6c-269f5d588300-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.313381] env[69994]: DEBUG oslo_concurrency.lockutils [req-96709e67-6583-479a-8151-b903dc1f651a req-70054709-5a2d-44df-9691-1f658566d22c service nova] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.313548] env[69994]: DEBUG oslo_concurrency.lockutils [req-96709e67-6583-479a-8151-b903dc1f651a req-70054709-5a2d-44df-9691-1f658566d22c service nova] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.313711] env[69994]: DEBUG nova.compute.manager [req-96709e67-6583-479a-8151-b903dc1f651a req-70054709-5a2d-44df-9691-1f658566d22c service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] No waiting events found dispatching network-vif-plugged-77897887-9eb8-476e-82c5-c0ab7f57adad {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 857.313983] env[69994]: WARNING nova.compute.manager [req-96709e67-6583-479a-8151-b903dc1f651a req-70054709-5a2d-44df-9691-1f658566d22c service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Received unexpected event network-vif-plugged-77897887-9eb8-476e-82c5-c0ab7f57adad for instance with vm_state building and task_state spawning. 
[ 857.362429] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5597e310-714d-4176-9a18-a108115964b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.370703] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d6ed26-612f-4cd9-8e82-7d0fee709c3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.377622] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Successfully updated port: 77897887-9eb8-476e-82c5-c0ab7f57adad {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.421965] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b7bdbf-5e2e-4fee-ba48-345317b6c3d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.432406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8a2666-bed2-4eb6-8435-7a22fc26e43f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.449272] env[69994]: DEBUG nova.compute.provider_tree [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.539360] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241908, 'name': ReconfigVM_Task, 'duration_secs': 0.354891} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.539360] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Reconfigured VM instance instance-0000003a to attach disk [datastore2] cd5a47f2-147b-4e50-980d-8e1c40bc7594/cd5a47f2-147b-4e50-980d-8e1c40bc7594.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.539360] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b91685e6-06af-42e2-857c-a11522a7d94d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.546040] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 857.546040] env[69994]: value = "task-3241912" [ 857.546040] env[69994]: _type = "Task" [ 857.546040] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.555517] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241912, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.569793] env[69994]: DEBUG nova.compute.manager [req-380db8eb-99eb-4e46-bbd2-7c941df5e929 req-c27fcad3-938e-4bf0-9208-cdd685347dfa service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Received event network-vif-deleted-b6545d7e-9893-450b-9d3b-67d4d7affbe4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 857.570068] env[69994]: INFO nova.compute.manager [req-380db8eb-99eb-4e46-bbd2-7c941df5e929 req-c27fcad3-938e-4bf0-9208-cdd685347dfa service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Neutron deleted interface b6545d7e-9893-450b-9d3b-67d4d7affbe4; detaching it from the instance and deleting it from the info cache [ 857.570261] env[69994]: DEBUG nova.network.neutron [req-380db8eb-99eb-4e46-bbd2-7c941df5e929 req-c27fcad3-938e-4bf0-9208-cdd685347dfa service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.604420] env[69994]: DEBUG oslo_vmware.api [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241909, 'name': PowerOffVM_Task, 'duration_secs': 0.333357} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.604420] env[69994]: DEBUG oslo_vmware.api [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241910, 'name': ReconfigVM_Task, 'duration_secs': 0.356664} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.604420] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.604420] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.604420] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Reconfigured VM instance instance-00000019 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 857.607939] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8d1532d-0be1-434f-ad6e-be6a31dfbda5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.610059] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dd70839-3f48-4737-981a-1413f5581bfd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.628267] env[69994]: DEBUG oslo_vmware.api [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 857.628267] env[69994]: value = "task-3241914" [ 857.628267] env[69994]: _type = "Task" [ 857.628267] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.641389] env[69994]: DEBUG oslo_vmware.api [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241914, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.704492] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241911, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.707358] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.707358] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 857.707358] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleting the datastore file [datastore1] 214b3508-6fb9-455e-be6b-bd9f6902b7ae {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.707509] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9efb1e3c-69d0-4e88-8da7-3310bfd4eb62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.715015] env[69994]: DEBUG oslo_vmware.api [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 857.715015] env[69994]: value = "task-3241915" [ 857.715015] env[69994]: _type = "Task" [ 857.715015] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.724684] env[69994]: DEBUG oslo_vmware.api [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.766492] env[69994]: DEBUG nova.network.neutron [-] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.880134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "refresh_cache-14b28a21-1b71-4d7e-bd6c-269f5d588300" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.880321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "refresh_cache-14b28a21-1b71-4d7e-bd6c-269f5d588300" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.880485] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.953589] env[69994]: DEBUG nova.scheduler.client.report [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.061455] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241912, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.075485] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ebf7d6e-a1dc-4a44-8337-446aee341799 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.087455] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b285d45-b540-43ec-a658-74e20d2604e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.136194] env[69994]: DEBUG nova.compute.manager [req-380db8eb-99eb-4e46-bbd2-7c941df5e929 req-c27fcad3-938e-4bf0-9208-cdd685347dfa service nova] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Detach interface failed, port_id=b6545d7e-9893-450b-9d3b-67d4d7affbe4, reason: Instance ffe5f2c6-69e7-4bdb-80d1-b421b695e790 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 858.146095] env[69994]: DEBUG oslo_vmware.api [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241914, 'name': ReconfigVM_Task, 'duration_secs': 0.179545} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.146448] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647877', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'name': 'volume-7e4a5305-889c-4f6f-ae22-6686decd4bae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7f66a148-86fe-4ddc-b8ed-6e6a306bbc24', 'attached_at': '', 'detached_at': '', 'volume_id': '7e4a5305-889c-4f6f-ae22-6686decd4bae', 'serial': '7e4a5305-889c-4f6f-ae22-6686decd4bae'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 858.173232] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "4ca53416-caed-418c-bb40-cabb8e311803" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.173480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "4ca53416-caed-418c-bb40-cabb8e311803" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.202763] env[69994]: DEBUG oslo_vmware.api [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3241911, 'name': PowerOnVM_Task, 'duration_secs': 0.663773} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.202763] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.202931] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37620550-428e-4b5b-b4f2-328ef6407b95 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance 'f07750f5-3f1d-4d97-98dc-285ed357cc7e' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 858.224800] env[69994]: DEBUG oslo_vmware.api [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3241915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157046} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.225117] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.225260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.225463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.225669] env[69994]: INFO nova.compute.manager [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Took 1.21 seconds to destroy the instance on the hypervisor. [ 858.225918] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 858.226130] env[69994]: DEBUG nova.compute.manager [-] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 858.226230] env[69994]: DEBUG nova.network.neutron [-] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.269780] env[69994]: INFO nova.compute.manager [-] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Took 1.39 seconds to deallocate network for instance. [ 858.424222] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.463701] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.315s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.466764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.808s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.468460] env[69994]: INFO nova.compute.claims [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.492800] env[69994]: INFO nova.scheduler.client.report [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Deleted allocations for instance c47c26c8-3f7f-436b-95aa-0bd08d41e62b [ 858.561233] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241912, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.584123] env[69994]: DEBUG nova.network.neutron [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Updating instance_info_cache with network_info: [{"id": "77897887-9eb8-476e-82c5-c0ab7f57adad", "address": "fa:16:3e:96:71:b9", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77897887-9e", "ovs_interfaceid": "77897887-9eb8-476e-82c5-c0ab7f57adad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.697820] env[69994]: DEBUG nova.objects.instance [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.783253] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.006565] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a065b685-6998-44c1-a039-605646c19ef6 tempest-ServerRescueTestJSON-2013304110 tempest-ServerRescueTestJSON-2013304110-project-member] Lock "c47c26c8-3f7f-436b-95aa-0bd08d41e62b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.839s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.042649] env[69994]: DEBUG nova.network.neutron [-] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.061338] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241912, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.086792] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "refresh_cache-14b28a21-1b71-4d7e-bd6c-269f5d588300" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.087149] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Instance network_info: |[{"id": "77897887-9eb8-476e-82c5-c0ab7f57adad", "address": "fa:16:3e:96:71:b9", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77897887-9e", "ovs_interfaceid": "77897887-9eb8-476e-82c5-c0ab7f57adad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 859.087553] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:71:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77897887-9eb8-476e-82c5-c0ab7f57adad', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.095819] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.096387] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 859.096622] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cd11244-5c0a-4033-8f27-a16d82e61fa6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.123569] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.123569] env[69994]: value = "task-3241916" [ 859.123569] env[69994]: _type = "Task" [ 859.123569] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.134515] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241916, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.555023] env[69994]: INFO nova.compute.manager [-] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Took 1.32 seconds to deallocate network for instance. [ 859.579129] env[69994]: DEBUG nova.compute.manager [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Received event network-changed-77897887-9eb8-476e-82c5-c0ab7f57adad {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.579129] env[69994]: DEBUG nova.compute.manager [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Refreshing instance network info cache due to event network-changed-77897887-9eb8-476e-82c5-c0ab7f57adad. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 859.579129] env[69994]: DEBUG oslo_concurrency.lockutils [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] Acquiring lock "refresh_cache-14b28a21-1b71-4d7e-bd6c-269f5d588300" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.579129] env[69994]: DEBUG oslo_concurrency.lockutils [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] Acquired lock "refresh_cache-14b28a21-1b71-4d7e-bd6c-269f5d588300" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 859.579129] env[69994]: DEBUG nova.network.neutron [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Refreshing network info cache for port 77897887-9eb8-476e-82c5-c0ab7f57adad {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.592027] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241912, 'name': Rename_Task, 'duration_secs': 1.971555} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.594018] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.595792] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4adecb7a-4f40-4e2b-be26-62d7558035b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.606883] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 859.606883] env[69994]: value = "task-3241917" [ 859.606883] env[69994]: _type = "Task" [ 859.606883] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.621874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.632284] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241917, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.652420] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241916, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.711677] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e6d8609-ae8c-410f-8994-0064b83234cb tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.359s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.715828] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.094s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.715957] env[69994]: DEBUG nova.compute.manager [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 859.717291] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e715dd1-ad7f-404d-952c-15742275e6a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.725463] env[69994]: DEBUG nova.compute.manager [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 859.725628] env[69994]: DEBUG nova.objects.instance [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.990347] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquiring lock "c14851d2-66c5-4865-ae66-abbe303f0c31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.990571] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.075349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 
tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.125956] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241917, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.128744] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e773c86-aadb-4808-b900-a8d876efb752 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.147687] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241916, 'name': CreateVM_Task, 'duration_secs': 0.654142} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.149304] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.150149] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.150323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.151230] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 860.151660] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad015ca-3897-4dc6-bf0a-1a1443c12695 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.155667] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99532763-aeda-4ce5-ba6e-939025924622 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.162298] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 860.162298] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525e0956-65d3-c3b6-8b8f-3c1f78afea9b" [ 860.162298] env[69994]: _type = "Task" [ 860.162298] env[69994]: } to 
complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.196177] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d66381-b2dd-44aa-bfa7-0864f8efc80a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.209879] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e44ad72-2f13-4888-90ff-423834e3b82e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.213991] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525e0956-65d3-c3b6-8b8f-3c1f78afea9b, 'name': SearchDatastore_Task, 'duration_secs': 0.012815} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.214825] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.215085] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 860.215319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.215461] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.215635] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 860.219854] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7aa7676a-5ba8-4cd0-aba3-94fd12611cc8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.232927] env[69994]: DEBUG nova.compute.provider_tree [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde 
tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.238181] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 860.238181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 860.242861] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd80949b-3ef5-4df7-a1f8-c08642b1a39c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.246147] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 860.246147] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521e5f29-7667-f872-8a19-3d14af804c83" [ 860.246147] env[69994]: _type = "Task" [ 860.246147] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.261379] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521e5f29-7667-f872-8a19-3d14af804c83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.353652] env[69994]: DEBUG nova.network.neutron [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Updated VIF entry in instance network info cache for port 77897887-9eb8-476e-82c5-c0ab7f57adad. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.354030] env[69994]: DEBUG nova.network.neutron [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Updating instance_info_cache with network_info: [{"id": "77897887-9eb8-476e-82c5-c0ab7f57adad", "address": "fa:16:3e:96:71:b9", "network": {"id": "200c7470-668b-414c-81ff-38c361914b33", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1520904988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91fa55bf90ff43a8b255a1e2fa2c22be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77897887-9e", "ovs_interfaceid": "77897887-9eb8-476e-82c5-c0ab7f57adad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.623823] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241917, 'name': PowerOnVM_Task, 'duration_secs': 0.679444} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.624120] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.624328] env[69994]: INFO nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Took 9.63 seconds to spawn the instance on the hypervisor. 
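The repeated "Task: {...} progress is N%" entries above are emitted by the oslo_vmware task-polling loop (_poll_task / wait_for_task) while vCenter tasks such as CreateVM_Task and PowerOnVM_Task run to completion. The following is a minimal, simplified sketch of that poll-until-done pattern, not the real oslo_vmware.api implementation; get_task_info() and the state strings are placeholder assumptions standing in for the vSphere TaskInfo lookup.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info() is assumed to return (state, progress, payload),
        # e.g. ('running', 33, None) or ('success', 100, result).
        while True:
            state, progress, payload = get_task_info()
            if state in ('queued', 'running'):
                # corresponds to the periodic "... progress is N%." log lines
                print('progress is %s%%' % progress)
                time.sleep(poll_interval)
                continue
            if state == 'success':
                # corresponds to "... completed successfully." with duration_secs
                return payload
            raise RuntimeError('task failed: %s' % payload)

The duration_secs value reported in the log is simply the wall-clock time between task submission and the first poll that observes the terminal state.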
[ 860.624507] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 860.625388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a783b6e2-ccd3-4d5d-8440-226bbb27b245 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.741166] env[69994]: DEBUG nova.scheduler.client.report [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.744723] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 860.745208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35c0ba98-6281-40ea-8d36-ac5465db65bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.758064] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521e5f29-7667-f872-8a19-3d14af804c83, 'name': SearchDatastore_Task, 'duration_secs': 0.013293} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.760674] env[69994]: DEBUG oslo_vmware.api [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 860.760674] env[69994]: value = "task-3241918" [ 860.760674] env[69994]: _type = "Task" [ 860.760674] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.760878] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07315c37-f070-4214-8305-9d3c9de2c32f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.779948] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 860.779948] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526b3bf4-a158-cb40-a025-0cd63bf67d3e" [ 860.779948] env[69994]: _type = "Task" [ 860.779948] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.787809] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526b3bf4-a158-cb40-a025-0cd63bf67d3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.856976] env[69994]: DEBUG oslo_concurrency.lockutils [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] Releasing lock "refresh_cache-14b28a21-1b71-4d7e-bd6c-269f5d588300" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.857426] env[69994]: DEBUG nova.compute.manager [req-2b0ed3c4-f35e-4894-a4fa-8abe2feaedb5 req-f1dc5028-6fd4-4367-9117-d117b2e7876e service nova] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Received event network-vif-deleted-aa1c3cb9-5c3d-4700-af3b-94fbe3952be2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 861.121156] env[69994]: DEBUG nova.network.neutron [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Port 03a2cce0-4737-45b4-8482-4eabd0e63386 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 861.121356] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.121510] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.121678] env[69994]: DEBUG nova.network.neutron [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} 
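The Acquiring / Acquired / Releasing entries around "refresh_cache-<instance uuid>" above come from oslo_concurrency's lock helpers, which serialize network-info cache refreshes per instance. A minimal usage sketch of that pattern follows; only the lockutils.lock() context manager is real library API, while the function body is a placeholder and the UUID is just the example instance from this log.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '14b28a21-1b71-4d7e-bd6c-269f5d588300'  # example from the log above

    def refresh_network_info_cache(instance_uuid):
        # Mirrors the "Acquiring lock ... Acquired lock ... Releasing lock"
        # sequence: the lock name scopes the critical section to one instance,
        # so concurrent event handlers and the compute manager do not race.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder for the Neutron port/network lookup and cache update

    refresh_network_info_cache(INSTANCE_UUID)

The "waited N.NNNs" / "held N.NNNs" figures in the surrounding lock log lines are the times spent blocking on and holding such a lock, respectively.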
[ 861.144029] env[69994]: INFO nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Took 51.52 seconds to build instance. [ 861.246797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.780s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.247641] env[69994]: DEBUG nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 861.250496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.767s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.250721] env[69994]: DEBUG nova.objects.instance [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lazy-loading 'resources' on Instance uuid df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 861.275927] env[69994]: DEBUG oslo_vmware.api [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241918, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.289560] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526b3bf4-a158-cb40-a025-0cd63bf67d3e, 'name': SearchDatastore_Task, 'duration_secs': 0.020718} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.289805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.290084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 14b28a21-1b71-4d7e-bd6c-269f5d588300/14b28a21-1b71-4d7e-bd6c-269f5d588300.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 861.290367] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8155d06c-2889-4c52-ab6c-c6e38e879bf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.297093] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 861.297093] env[69994]: value = "task-3241919" [ 861.297093] env[69994]: _type = "Task" [ 861.297093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.645998] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.344s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.757076] env[69994]: DEBUG nova.compute.utils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 861.759645] env[69994]: DEBUG nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 861.781178] env[69994]: DEBUG oslo_vmware.api [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241918, 'name': PowerOffVM_Task, 'duration_secs': 0.541104} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.781178] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.781409] env[69994]: DEBUG nova.compute.manager [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 861.782718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbe7cfa-3be8-4d77-8419-458cdd87b45c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.810687] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241919, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.848550] env[69994]: DEBUG nova.network.neutron [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance_info_cache with network_info: [{"id": "03a2cce0-4737-45b4-8482-4eabd0e63386", "address": "fa:16:3e:fe:86:b3", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03a2cce0-47", "ovs_interfaceid": "03a2cce0-4737-45b4-8482-4eabd0e63386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.150635] env[69994]: DEBUG nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 862.241551] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d5ab5f-bb01-43fc-a14f-25e5f65093f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.250125] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd34b0a-09d5-4ee6-8c62-9fddc81aabb8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.281858] env[69994]: DEBUG nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 862.285525] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db410807-c39e-4b69-b262-028bcdc6e80c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.294092] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bada088-726d-4e45-8c57-d55bae1b5610 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.315956] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241919, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560106} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.316442] env[69994]: DEBUG nova.compute.provider_tree [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.318473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57df050b-349c-4fe3-aff4-b665a1526823 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.603s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.319315] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 14b28a21-1b71-4d7e-bd6c-269f5d588300/14b28a21-1b71-4d7e-bd6c-269f5d588300.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 862.319531] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 862.320280] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58ec5a2f-54ae-4b6b-a2f1-1aac0f27e18d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.329292] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 862.329292] env[69994]: value = "task-3241920" [ 862.329292] env[69994]: _type = "Task" [ 862.329292] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.339256] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241920, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.351278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.669086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.821926] env[69994]: DEBUG nova.scheduler.client.report [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.839201] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241920, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076708} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.840116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 862.840967] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b11079-3f78-40c0-998c-723052c33604 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.856993] env[69994]: DEBUG nova.compute.manager [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69994) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 862.857238] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.865986] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 14b28a21-1b71-4d7e-bd6c-269f5d588300/14b28a21-1b71-4d7e-bd6c-269f5d588300.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.867401] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-910e8f66-ecc7-481e-bab2-748d1b2c82b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.891112] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 862.891112] env[69994]: value = "task-3241921" [ 862.891112] env[69994]: _type = "Task" [ 862.891112] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.902563] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241921, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.295100] env[69994]: DEBUG nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 863.325253] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 863.325552] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.325747] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 863.326057] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.326296] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 863.326497] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 863.326749] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 863.326968] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 863.327210] env[69994]: DEBUG 
nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 863.327415] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 863.327728] env[69994]: DEBUG nova.virt.hardware [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 863.328586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.078s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.331241] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10514481-23d0-47f9-81ce-95adc3acf6c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.334988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 39.536s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.335585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.335585] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 863.335782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.648s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.340087] env[69994]: INFO nova.compute.claims [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.341298] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3848bf1-c53f-4e3c-92d3-8af0e0ea5d2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.354019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4cb7df-60b7-418c-a386-2a1762537293 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.357503] env[69994]: INFO nova.scheduler.client.report [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Deleted allocations for instance df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df [ 863.359618] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ea812d-966e-4b41-91f8-f4d8310636ea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.384949] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d723ef70-307f-4ec2-996a-5f2684b23cf9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.388155] env[69994]: DEBUG nova.objects.instance [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.389686] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.395447] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Creating folder: Project (d0a1e95adef44ed6823ec93024130ba9). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.396726] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9b60700-7b5c-405a-bd8e-37c8cbf669f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.408677] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02abbfa-97e5-4a20-8bd1-98ebfdea279e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.416266] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241921, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.418328] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Created folder: Project (d0a1e95adef44ed6823ec93024130ba9) in parent group-v647729. [ 863.418544] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Creating folder: Instances. Parent ref: group-v647909. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.418794] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6900b08a-38d3-441f-b886-e1c07c6ce75d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.444866] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178260MB free_disk=119GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 863.445057] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.455753] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Created folder: Instances in parent group-v647909. [ 863.456084] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.456311] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.456531] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2763e6f6-ac98-42bb-af1a-317694240f19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.475573] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.475573] env[69994]: value = "task-3241924" [ 863.475573] env[69994]: _type = "Task" [ 863.475573] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.485832] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241924, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.869841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-90c5962c-3382-44b1-af3f-016a41b350b1 tempest-AttachInterfacesV270Test-1448242965 tempest-AttachInterfacesV270Test-1448242965-project-member] Lock "df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.057s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.901987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.902183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.902352] env[69994]: DEBUG nova.network.neutron [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.902527] env[69994]: DEBUG nova.objects.instance [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'info_cache' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.910031] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241921, 'name': ReconfigVM_Task, 'duration_secs': 0.539917} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.910299] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 14b28a21-1b71-4d7e-bd6c-269f5d588300/14b28a21-1b71-4d7e-bd6c-269f5d588300.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 863.910895] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-580ae560-0065-43f3-bf7c-1d4fe0f78515 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.919242] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 863.919242] env[69994]: value = "task-3241925" [ 863.919242] env[69994]: _type = "Task" [ 863.919242] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.927640] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241925, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.985160] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241924, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.406403] env[69994]: DEBUG nova.objects.base [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Object Instance<7f66a148-86fe-4ddc-b8ed-6e6a306bbc24> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 864.434017] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241925, 'name': Rename_Task, 'duration_secs': 0.174505} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.434017] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 864.434017] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-678d3f36-624c-4e4d-bf8c-18042f1a2052 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.441768] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 864.441768] env[69994]: value = "task-3241926" [ 864.441768] env[69994]: _type = "Task" [ 864.441768] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.451819] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241926, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.489808] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241924, 'name': CreateVM_Task, 'duration_secs': 0.619637} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.489957] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.490403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.492029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.492029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 864.492029] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-311c4a89-838e-43d6-b590-bd2bb33d14cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.500119] env[69994]: DEBUG 
oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 864.500119] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a35871-8a33-44a0-b08d-f5c5a809eaaa" [ 864.500119] env[69994]: _type = "Task" [ 864.500119] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.510339] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a35871-8a33-44a0-b08d-f5c5a809eaaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.825938] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a70b14-049e-40ee-9648-9f4d884f2b2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.834112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9326c00b-8186-47ec-b3e9-54ed96216442 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.866920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aee35b3-37b8-4156-a4fa-607e85357f79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.876217] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68b9fd4-41f0-4a87-9624-e5992d54d156 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.890433] env[69994]: DEBUG nova.compute.provider_tree [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.955754] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241926, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.011035] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a35871-8a33-44a0-b08d-f5c5a809eaaa, 'name': SearchDatastore_Task, 'duration_secs': 0.012923} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.011357] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.011642] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 865.011925] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.012104] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.012285] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 865.012560] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce149a4e-0b18-4d36-bfda-ca4922e7507a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.022429] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 865.022674] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 865.025620] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-017f4d96-d3a7-4d6c-9cf1-26e917cac187 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.031905] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 865.031905] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dc31e4-386c-ca5b-2dcd-b902e33d8e57" [ 865.031905] env[69994]: _type = "Task" [ 865.031905] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.040720] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dc31e4-386c-ca5b-2dcd-b902e33d8e57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.142643] env[69994]: DEBUG nova.network.neutron [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating instance_info_cache with network_info: [{"id": "f2652bdf-bba7-4a73-9045-397e55945ed1", "address": "fa:16:3e:c5:70:d6", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2652bdf-bb", "ovs_interfaceid": "f2652bdf-bba7-4a73-9045-397e55945ed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.396362] env[69994]: DEBUG nova.scheduler.client.report [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 865.453022] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241926, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.548382] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dc31e4-386c-ca5b-2dcd-b902e33d8e57, 'name': SearchDatastore_Task, 'duration_secs': 0.010543} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.548382] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e70439e1-696d-498a-97aa-3299ac98678a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.556682] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 865.556682] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523e8447-d854-4aa4-5f76-5f8f42e6aaa1" [ 865.556682] env[69994]: _type = "Task" [ 865.556682] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.568339] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523e8447-d854-4aa4-5f76-5f8f42e6aaa1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.645884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "refresh_cache-7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.902630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.902630] env[69994]: DEBUG nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 865.904879] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.667s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.906447] env[69994]: INFO nova.compute.claims [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.955610] env[69994]: DEBUG oslo_vmware.api [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241926, 'name': PowerOnVM_Task, 'duration_secs': 1.136179} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.955890] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 865.955890] env[69994]: INFO nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Took 9.94 seconds to spawn the instance on the hypervisor. [ 865.956044] env[69994]: DEBUG nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.957058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbf8951-370c-4f91-b6f2-5b4c66f6b803 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.067488] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523e8447-d854-4aa4-5f76-5f8f42e6aaa1, 'name': SearchDatastore_Task, 'duration_secs': 0.015917} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.067895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.068267] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 866.068674] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c2faf72-d1a2-478d-a83e-2912f5ba4808 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.077028] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 866.077028] env[69994]: value = "task-3241927" [ 866.077028] env[69994]: _type = "Task" [ 866.077028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.084438] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241927, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.411482] env[69994]: DEBUG nova.compute.utils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 866.416420] env[69994]: DEBUG nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 866.416676] env[69994]: DEBUG nova.network.neutron [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 866.473972] env[69994]: DEBUG nova.policy [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b57549e4ae494107a44e8c52ac6fc1cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da51434b3d004f1681f76b87d177bd84', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 866.478141] env[69994]: INFO nova.compute.manager [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Took 51.03 seconds to build instance. [ 866.589805] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241927, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.651351] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.651676] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c73eefd-f997-40ef-a81d-eae17b75d64c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.660391] env[69994]: DEBUG oslo_vmware.api [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 866.660391] env[69994]: value = "task-3241928" [ 866.660391] env[69994]: _type = "Task" [ 866.660391] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.671382] env[69994]: DEBUG oslo_vmware.api [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241928, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.823746] env[69994]: DEBUG nova.network.neutron [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Successfully created port: 738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.917250] env[69994]: DEBUG nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 866.981460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4b6d1c19-354a-4598-b383-2ac29c0f03f7 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.652s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.092981] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.093269] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 867.094771] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 867.097741] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a2372fe-3782-4183-93de-a1916d3beef1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.107840] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 867.107840] env[69994]: value = "task-3241929" [ 867.107840] env[69994]: _type = "Task" [ 867.107840] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.121708] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241929, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.174019] env[69994]: DEBUG oslo_vmware.api [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241928, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.431296] env[69994]: INFO nova.virt.block_device [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Booting with volume b7a10ba0-24cb-4e9b-a0d8-098524f6ac67 at /dev/sda [ 867.481936] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-857146b9-5fbf-47f1-827d-119050626d8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.486475] env[69994]: DEBUG nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 867.500457] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e252b46c-3e6b-48af-8558-55704b349e5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.551305] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fce7a62c-907c-4e6f-9ff6-28e2c9744d43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.561716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ac36db-93cd-4f12-b68b-3227c93c9d46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.590302] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbc3e0d-4c05-4ab2-9faf-a8514d6c698a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.627772] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c3a7f6-832d-47d4-99cc-4d1281235d56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.637172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc20db06-8fd5-4413-ad51-7dcefc9683d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.648252] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 
tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241929, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.261468} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.649300] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.650285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1dc4030-5d16-4898-89fa-5184f159cec4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.680760] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00fa188-f7c5-4880-9086-2e5e5773bad8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.687947] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cbe7d0-9076-4df1-9d3f-447403f84034 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.710056] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.718331] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1de423f1-2efd-46b2-acb3-a35ca850275a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.732802] env[69994]: DEBUG oslo_vmware.api [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3241928, 'name': PowerOnVM_Task, 'duration_secs': 0.9107} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.733136] env[69994]: DEBUG nova.virt.block_device [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updating existing volume attachment record: 672d0096-508e-4863-aebc-1626d6fa69b2 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 867.736903] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7086ec-42ba-4020-9c57-378d7ac32af6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.740893] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.741102] env[69994]: DEBUG nova.compute.manager [None req-4f140bef-e34b-4d30-9be0-7c8325a5d5e7 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 867.742320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b73396-d641-448c-b10b-5fd0f568f99f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.758503] env[69994]: DEBUG nova.compute.provider_tree [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.762816] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 867.762816] env[69994]: value = "task-3241930" [ 867.762816] env[69994]: _type = "Task" [ 867.762816] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.777892] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241930, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.898472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.899234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.899234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.899629] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.899817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.903400] env[69994]: INFO nova.compute.manager [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Terminating instance [ 868.015183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.021604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "14b28a21-1b71-4d7e-bd6c-269f5d588300" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.021604] 
env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.021604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "14b28a21-1b71-4d7e-bd6c-269f5d588300-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.021604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.021604] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.024096] env[69994]: INFO nova.compute.manager [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Terminating instance [ 868.267013] env[69994]: DEBUG nova.scheduler.client.report [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 868.285911] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.416025] env[69994]: DEBUG nova.compute.manager [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 868.416025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.416025] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e23dd6-0a19-44e1-a14c-dca862b6c9a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.428290] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.431064] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1010094-f1e0-4f7e-9aa3-a7360b18f51f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.441140] env[69994]: DEBUG oslo_vmware.api [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 868.441140] env[69994]: value = "task-3241931" [ 868.441140] env[69994]: _type = "Task" [ 868.441140] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.453363] env[69994]: DEBUG oslo_vmware.api [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241931, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.534156] env[69994]: DEBUG nova.compute.manager [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 868.534429] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.536191] env[69994]: DEBUG nova.compute.manager [req-d75f3984-b2ac-46d2-982f-c6ca650180b6 req-86cc8c80-998d-4db3-beca-6de700e1630d service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Received event network-vif-plugged-738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.536401] env[69994]: DEBUG oslo_concurrency.lockutils [req-d75f3984-b2ac-46d2-982f-c6ca650180b6 req-86cc8c80-998d-4db3-beca-6de700e1630d service nova] Acquiring lock "25a64898-568e-4095-aace-f8a564cdf916-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.536606] env[69994]: DEBUG oslo_concurrency.lockutils [req-d75f3984-b2ac-46d2-982f-c6ca650180b6 req-86cc8c80-998d-4db3-beca-6de700e1630d service nova] Lock "25a64898-568e-4095-aace-f8a564cdf916-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.536775] env[69994]: DEBUG oslo_concurrency.lockutils [req-d75f3984-b2ac-46d2-982f-c6ca650180b6 req-86cc8c80-998d-4db3-beca-6de700e1630d service nova] Lock "25a64898-568e-4095-aace-f8a564cdf916-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.536936] env[69994]: DEBUG nova.compute.manager [req-d75f3984-b2ac-46d2-982f-c6ca650180b6 req-86cc8c80-998d-4db3-beca-6de700e1630d service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] No waiting events found dispatching network-vif-plugged-738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 868.537145] env[69994]: WARNING nova.compute.manager [req-d75f3984-b2ac-46d2-982f-c6ca650180b6 req-86cc8c80-998d-4db3-beca-6de700e1630d service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Received unexpected event network-vif-plugged-738bec83-fa63-41a5-899f-73cdd8bec4ba for instance with vm_state building and task_state block_device_mapping. 
[ 868.538309] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b54884-6d49-4819-957a-34edc3436d4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.548315] env[69994]: DEBUG nova.network.neutron [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Successfully updated port: 738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.552773] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.553094] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50daedd4-f664-42f6-a73b-0ab16afb4065 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.562156] env[69994]: DEBUG oslo_vmware.api [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 868.562156] env[69994]: value = "task-3241932" [ 868.562156] env[69994]: _type = "Task" [ 868.562156] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.575253] env[69994]: DEBUG oslo_vmware.api [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241932, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.776586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.872s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.777201] env[69994]: DEBUG nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 868.780840] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.778s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.782704] env[69994]: INFO nova.compute.claims [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.792170] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241930, 'name': ReconfigVM_Task, 'duration_secs': 0.752635} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.792453] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Reconfigured VM instance instance-0000003c to attach disk [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.793066] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09346368-038d-4d9b-8e1d-2bdcaee6d428 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.802452] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 868.802452] env[69994]: value = "task-3241933" [ 868.802452] env[69994]: _type = "Task" [ 868.802452] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.813260] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241933, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.951961] env[69994]: DEBUG oslo_vmware.api [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241931, 'name': PowerOffVM_Task, 'duration_secs': 0.406829} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.952253] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.952417] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.952681] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e1ad7bc-0d2f-425c-8485-f2481c1e3b63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.025914] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.026243] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.026626] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleting the datastore file [datastore2] cd5a47f2-147b-4e50-980d-8e1c40bc7594 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.026704] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d3de55c-4984-4df0-aa1b-953b19204490 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.033548] env[69994]: DEBUG oslo_vmware.api [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 869.033548] env[69994]: value = "task-3241935" [ 869.033548] env[69994]: _type = "Task" [ 869.033548] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.041905] env[69994]: DEBUG oslo_vmware.api [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241935, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.053611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Acquiring lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.053756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Acquired lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.053914] env[69994]: DEBUG nova.network.neutron [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.074133] env[69994]: DEBUG oslo_vmware.api [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241932, 'name': PowerOffVM_Task, 'duration_secs': 0.290862} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.074352] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.074521] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.074770] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5432c1e7-1c5d-4795-bf97-5c2b70ed9501 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.154833] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.155154] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.155377] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 
tempest-MultipleCreateTestJSON-688441779-project-member] Deleting the datastore file [datastore2] 14b28a21-1b71-4d7e-bd6c-269f5d588300 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.155681] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a580d8d0-4427-483d-b19b-04088777923d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.165078] env[69994]: DEBUG oslo_vmware.api [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for the task: (returnval){ [ 869.165078] env[69994]: value = "task-3241937" [ 869.165078] env[69994]: _type = "Task" [ 869.165078] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.174161] env[69994]: DEBUG oslo_vmware.api [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241937, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.287753] env[69994]: DEBUG nova.compute.utils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 869.291279] env[69994]: DEBUG nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 869.291447] env[69994]: DEBUG nova.network.neutron [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.318558] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241933, 'name': Rename_Task, 'duration_secs': 0.313631} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.318908] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.319210] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2dc23e3-d5cc-4666-97a0-ad498db18fbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.329953] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 869.329953] env[69994]: value = "task-3241938" [ 869.329953] env[69994]: _type = "Task" [ 869.329953] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.339178] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241938, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.351130] env[69994]: DEBUG nova.policy [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb1874902bc24959b717674a99e530a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee188ea80c9847188df8b8482b7c6ec7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 869.546809] env[69994]: DEBUG oslo_vmware.api [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163692} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.550078] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.550078] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.550078] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.550078] env[69994]: INFO nova.compute.manager [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Took 1.13 seconds to destroy the instance on the hypervisor. [ 869.550078] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.550078] env[69994]: DEBUG nova.compute.manager [-] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 869.550078] env[69994]: DEBUG nova.network.neutron [-] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.602324] env[69994]: DEBUG nova.network.neutron [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.679452] env[69994]: DEBUG oslo_vmware.api [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Task: {'id': task-3241937, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185613} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.680020] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.680313] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.680531] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.680704] env[69994]: INFO nova.compute.manager [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Took 1.15 seconds to destroy the instance on the hypervisor. [ 869.680967] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.681220] env[69994]: DEBUG nova.compute.manager [-] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 869.681305] env[69994]: DEBUG nova.network.neutron [-] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.785710] env[69994]: DEBUG nova.network.neutron [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Successfully created port: 1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.792115] env[69994]: DEBUG nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 869.844739] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241938, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.859381] env[69994]: DEBUG nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 869.860997] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 869.860997] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.860997] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 869.860997] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.860997] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 869.860997] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 869.861544] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 869.862245] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 
tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 869.862245] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 869.862527] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 869.863130] env[69994]: DEBUG nova.virt.hardware [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 869.864061] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5987855-011b-45d0-a48d-8dc4c741869f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.877595] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da631ad-0a85-445f-9805-924f2dbbbf7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.934916] env[69994]: DEBUG nova.network.neutron [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updating instance_info_cache with network_info: [{"id": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "address": "fa:16:3e:70:ee:d0", "network": {"id": "1a5f9db3-2ded-4798-946a-cd79d7da5ae0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-463906691-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da51434b3d004f1681f76b87d177bd84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738bec83-fa", "ovs_interfaceid": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.120675] env[69994]: DEBUG nova.compute.manager [req-fc350909-7f06-40be-a963-14b0ccbefd10 req-0bb2e9de-3514-484c-88a4-a4614b5e88e2 service nova] 
[instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Received event network-vif-deleted-8113f947-c14b-4c98-9e15-99bdde32f01c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.121399] env[69994]: INFO nova.compute.manager [req-fc350909-7f06-40be-a963-14b0ccbefd10 req-0bb2e9de-3514-484c-88a4-a4614b5e88e2 service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Neutron deleted interface 8113f947-c14b-4c98-9e15-99bdde32f01c; detaching it from the instance and deleting it from the info cache [ 870.121399] env[69994]: DEBUG nova.network.neutron [req-fc350909-7f06-40be-a963-14b0ccbefd10 req-0bb2e9de-3514-484c-88a4-a4614b5e88e2 service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.346785] env[69994]: DEBUG oslo_vmware.api [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241938, 'name': PowerOnVM_Task, 'duration_secs': 0.590875} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.347608] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.347854] env[69994]: INFO nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Took 7.05 seconds to spawn the instance on the hypervisor. 
[ 870.348052] env[69994]: DEBUG nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 870.348873] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d00418-5375-4a7e-9a30-d3f2b8bc95dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.353313] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6ca036-d47e-4355-9030-57413cd57035 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.367521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b695917b-6381-4de7-9223-300ffee05a34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.411320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a77870f-6435-4bf0-bafd-d0b63ef0c36b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.421561] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef7d400-5725-4432-a4ac-9edc6ac27c62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.437867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Releasing lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.438194] env[69994]: DEBUG nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Instance network_info: |[{"id": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "address": "fa:16:3e:70:ee:d0", "network": {"id": "1a5f9db3-2ded-4798-946a-cd79d7da5ae0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-463906691-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da51434b3d004f1681f76b87d177bd84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738bec83-fa", "ovs_interfaceid": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 870.438687] env[69994]: DEBUG nova.compute.provider_tree [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.440500] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:ee:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '738bec83-fa63-41a5-899f-73cdd8bec4ba', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.447560] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Creating folder: Project (da51434b3d004f1681f76b87d177bd84). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.448978] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97699df9-ec6e-4f57-bbcc-bdb265e02782 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.465969] env[69994]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 870.465969] env[69994]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69994) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 870.466118] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Folder already exists: Project (da51434b3d004f1681f76b87d177bd84). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 870.466346] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Creating folder: Instances. Parent ref: group-v647860. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.466599] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f06fd93e-18ac-405c-9275-807727373e99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.478381] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Created folder: Instances in parent group-v647860. 
[ 870.478723] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.478998] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.479181] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff8ad0e7-1afd-4d30-8360-d39c5820929c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.500535] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.500535] env[69994]: value = "task-3241941" [ 870.500535] env[69994]: _type = "Task" [ 870.500535] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.509362] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241941, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.511763] env[69994]: DEBUG nova.network.neutron [-] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.626408] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c013c82-6f98-4cdb-868a-88f889870618 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.637767] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e712c8f1-29f3-4c87-a47c-d205c748279d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.650694] env[69994]: DEBUG nova.network.neutron [-] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.678263] env[69994]: DEBUG nova.compute.manager [req-fc350909-7f06-40be-a963-14b0ccbefd10 req-0bb2e9de-3514-484c-88a4-a4614b5e88e2 service nova] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Detach interface failed, port_id=8113f947-c14b-4c98-9e15-99bdde32f01c, reason: Instance cd5a47f2-147b-4e50-980d-8e1c40bc7594 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 870.804478] env[69994]: DEBUG nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 870.835677] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 870.835943] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.836257] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.836385] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.836532] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 870.836679] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 870.836891] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 870.837103] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 870.837303] env[69994]: DEBUG nova.virt.hardware [None 
req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 870.837447] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 870.837618] env[69994]: DEBUG nova.virt.hardware [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 870.838711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cc2aee-63f4-4554-9453-ac26e80e7b38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.847678] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d976f80-a152-4730-9604-cdf95e854e1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.872597] env[69994]: DEBUG nova.compute.manager [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Received event network-changed-738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.872794] env[69994]: DEBUG nova.compute.manager [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Refreshing instance network info cache due to event network-changed-738bec83-fa63-41a5-899f-73cdd8bec4ba. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 870.873018] env[69994]: DEBUG oslo_concurrency.lockutils [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] Acquiring lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.873167] env[69994]: DEBUG oslo_concurrency.lockutils [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] Acquired lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.873481] env[69994]: DEBUG nova.network.neutron [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Refreshing network info cache for port 738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.878096] env[69994]: INFO nova.compute.manager [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Took 50.27 seconds to build instance. 
[ 870.949664] env[69994]: DEBUG nova.scheduler.client.report [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 871.012204] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241941, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.014811] env[69994]: INFO nova.compute.manager [-] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Took 1.47 seconds to deallocate network for instance. [ 871.152871] env[69994]: INFO nova.compute.manager [-] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Took 1.47 seconds to deallocate network for instance. [ 871.381320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-08e61bc2-8034-4f2e-9449-e2005baf0bde tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "f3268fe1-768c-4d27-828a-5885ce166f90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.421s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.383165] env[69994]: DEBUG nova.network.neutron [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Successfully updated port: 1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.455208] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.674s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.455781] env[69994]: DEBUG nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 871.460207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.970s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.460207] env[69994]: DEBUG nova.objects.instance [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lazy-loading 'resources' on Instance uuid dca638aa-c491-431f-a0e5-d02bd76705ad {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.518276] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241941, 'name': CreateVM_Task, 'duration_secs': 0.575401} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.518276] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.519594] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '672d0096-508e-4863-aebc-1626d6fa69b2', 'disk_bus': None, 'guest_format': None, 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647869', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'name': 'volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '25a64898-568e-4095-aace-f8a564cdf916', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'serial': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67'}, 'mount_device': '/dev/sda', 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 871.520610] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Root volume attach. 
Driver type: vmdk {{(pid=69994) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 871.521581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.522097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2526b5da-e436-47ce-8091-64a09b851f33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.534019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c795167-f026-46a1-ab53-3238c2597e07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.537649] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd35660-dfff-4344-8a65-82f010f24d0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.546113] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b5daec12-7a89-496c-a03f-7ff2708c9ed0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.555254] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 871.555254] env[69994]: value = "task-3241942" [ 871.555254] env[69994]: _type = "Task" [ 871.555254] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.565105] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.659684] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.783422] env[69994]: DEBUG nova.network.neutron [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updated VIF entry in instance network info cache for port 738bec83-fa63-41a5-899f-73cdd8bec4ba. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.783422] env[69994]: DEBUG nova.network.neutron [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updating instance_info_cache with network_info: [{"id": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "address": "fa:16:3e:70:ee:d0", "network": {"id": "1a5f9db3-2ded-4798-946a-cd79d7da5ae0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-463906691-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da51434b3d004f1681f76b87d177bd84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738bec83-fa", "ovs_interfaceid": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.888524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-0b975ce0-40a4-48a9-a046-66227636d496" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.888524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-0b975ce0-40a4-48a9-a046-66227636d496" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.890596] env[69994]: DEBUG nova.network.neutron [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 871.968088] env[69994]: DEBUG nova.compute.utils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 871.969555] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.969789] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.972072] env[69994]: DEBUG nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 871.972178] env[69994]: DEBUG nova.network.neutron [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 872.046755] env[69994]: DEBUG nova.policy [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a50297ffebb845cdb950de24f60cb55a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f035f8fbac46483fb4d70f166df319b6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 872.075403] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 42%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.202165] env[69994]: DEBUG nova.compute.manager [req-014ff95f-b07d-48d4-9ef7-10d518dc3909 req-76965969-3abf-42ef-adcd-47436c1c89cf service nova] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Received event network-vif-deleted-77897887-9eb8-476e-82c5-c0ab7f57adad {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.285507] env[69994]: DEBUG oslo_concurrency.lockutils [req-d1f2b01d-2242-4a77-bc13-094213739ac1 req-78d38194-960e-4272-b970-5a2df31f2089 service nova] Releasing lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.431129] env[69994]: DEBUG nova.network.neutron [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 872.470657] env[69994]: DEBUG nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 872.476566] env[69994]: DEBUG nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 872.566890] env[69994]: DEBUG nova.network.neutron [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Successfully created port: d778d0d8-5cbc-478a-a9e8-73d16c874a6f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.575811] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 54%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.592846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3dc4ccb-52eb-4de5-aa17-ab9f4cbc527d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.603761] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31faaea-85d3-4981-98fb-7749400bd12e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.639583] env[69994]: DEBUG nova.network.neutron [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Updating instance_info_cache with network_info: [{"id": "1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c", "address": "fa:16:3e:df:16:b3", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a8d6b1d-c5", "ovs_interfaceid": "1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.642096] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13bf3e7-c73a-4085-81c5-232462a6b401 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.655131] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4fd9e7-9677-49e6-a93b-03f397a27c9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.672353] env[69994]: DEBUG nova.compute.provider_tree [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.962751] env[69994]: DEBUG nova.compute.manager [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Received event network-vif-plugged-1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.962877] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] Acquiring lock "0b975ce0-40a4-48a9-a046-66227636d496-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.964790] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] Lock "0b975ce0-40a4-48a9-a046-66227636d496-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.964790] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] Lock "0b975ce0-40a4-48a9-a046-66227636d496-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.964790] env[69994]: DEBUG nova.compute.manager [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] No waiting events found dispatching network-vif-plugged-1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 872.964790] env[69994]: WARNING nova.compute.manager [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Received unexpected event network-vif-plugged-1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c for instance with vm_state building and task_state spawning. 
[ 872.964790] env[69994]: DEBUG nova.compute.manager [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Received event network-changed-1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.964790] env[69994]: DEBUG nova.compute.manager [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Refreshing instance network info cache due to event network-changed-1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 872.964790] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] Acquiring lock "refresh_cache-0b975ce0-40a4-48a9-a046-66227636d496" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.013324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.071170] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 67%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.141794] env[69994]: INFO nova.compute.manager [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Rebuilding instance [ 873.148685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-0b975ce0-40a4-48a9-a046-66227636d496" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.148685] env[69994]: DEBUG nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Instance network_info: |[{"id": "1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c", "address": "fa:16:3e:df:16:b3", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a8d6b1d-c5", "ovs_interfaceid": "1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 873.148685] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] Acquired lock "refresh_cache-0b975ce0-40a4-48a9-a046-66227636d496" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.148685] env[69994]: DEBUG nova.network.neutron [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Refreshing network info cache for port 1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 873.149266] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:16:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.156934] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 873.157831] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.160033] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49934ec5-9d86-4a63-93be-828e85b70ed3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.176538] env[69994]: DEBUG nova.scheduler.client.report [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.191675] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.191675] env[69994]: value = "task-3241943" [ 873.191675] env[69994]: _type = "Task" [ 873.191675] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.203159] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241943, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.208270] env[69994]: DEBUG nova.compute.manager [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.210234] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6d086f-2260-4f84-a67a-01c46051a0c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.484634] env[69994]: DEBUG nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 873.521807] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 873.522101] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.522271] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 873.522471] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.522640] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 873.522802] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 873.523061] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 873.523226] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 873.523409] env[69994]: DEBUG 
nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 873.523579] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 873.523773] env[69994]: DEBUG nova.virt.hardware [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 873.524925] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf46023-d48c-4f0c-a757-e9fd78de262b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.535596] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9bcc10-b4c0-413e-aafa-a79721e92921 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.570509] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 81%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.681594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.222s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.684023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.309s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.684295] env[69994]: DEBUG nova.objects.instance [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lazy-loading 'resources' on Instance uuid 1d548f54-4ffa-4299-9212-717350558ad4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.709615] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241943, 'name': CreateVM_Task, 'duration_secs': 0.385381} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.711485] env[69994]: INFO nova.scheduler.client.report [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleted allocations for instance dca638aa-c491-431f-a0e5-d02bd76705ad [ 873.715371] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.716197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.716390] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.716837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 873.717114] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c31c1ae-1c56-4cea-96ff-c4b0d04455a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.726313] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 873.726313] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e6b835-2f61-dba3-6d41-cda7111fe514" [ 873.726313] env[69994]: _type = "Task" [ 873.726313] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.737313] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e6b835-2f61-dba3-6d41-cda7111fe514, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.891275] env[69994]: DEBUG nova.network.neutron [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Updated VIF entry in instance network info cache for port 1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.891741] env[69994]: DEBUG nova.network.neutron [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Updating instance_info_cache with network_info: [{"id": "1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c", "address": "fa:16:3e:df:16:b3", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a8d6b1d-c5", "ovs_interfaceid": "1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.068825] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.224063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-924d5545-1778-46c6-838a-748c49f55cf3 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "dca638aa-c491-431f-a0e5-d02bd76705ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.267s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.225132] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.225373] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86e0a653-46da-47ea-b35e-061b7ca1bc1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.239473] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e6b835-2f61-dba3-6d41-cda7111fe514, 'name': SearchDatastore_Task, 'duration_secs': 0.024672} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.241563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.241815] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.242114] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.242324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.242559] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.242930] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 874.242930] env[69994]: value = "task-3241944" [ 874.242930] env[69994]: _type = "Task" [ 874.242930] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.243515] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-685f8257-c4f4-48ed-8863-d71088c312e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.254679] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241944, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.270051] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.270448] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.271214] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0940f1c-4d1a-40e9-b4ff-98e572e4c08a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.281348] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 874.281348] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5268253a-d324-a560-2d2a-1303a61dd4a0" [ 874.281348] env[69994]: _type = "Task" [ 874.281348] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.289248] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5268253a-d324-a560-2d2a-1303a61dd4a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.356535] env[69994]: DEBUG nova.compute.manager [req-00691d8b-dee7-4513-9b21-aca479e18968 req-545e9a02-072d-4117-a097-6fff53153166 service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Received event network-vif-plugged-d778d0d8-5cbc-478a-a9e8-73d16c874a6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.356977] env[69994]: DEBUG oslo_concurrency.lockutils [req-00691d8b-dee7-4513-9b21-aca479e18968 req-545e9a02-072d-4117-a097-6fff53153166 service nova] Acquiring lock "7ef329a2-4d61-428a-8a43-f309a1e953d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.357562] env[69994]: DEBUG oslo_concurrency.lockutils [req-00691d8b-dee7-4513-9b21-aca479e18968 req-545e9a02-072d-4117-a097-6fff53153166 service nova] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.357778] env[69994]: DEBUG oslo_concurrency.lockutils [req-00691d8b-dee7-4513-9b21-aca479e18968 req-545e9a02-072d-4117-a097-6fff53153166 service nova] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.357924] env[69994]: DEBUG nova.compute.manager [req-00691d8b-dee7-4513-9b21-aca479e18968 req-545e9a02-072d-4117-a097-6fff53153166 service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] No waiting events found dispatching network-vif-plugged-d778d0d8-5cbc-478a-a9e8-73d16c874a6f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 874.358111] env[69994]: WARNING nova.compute.manager [req-00691d8b-dee7-4513-9b21-aca479e18968 req-545e9a02-072d-4117-a097-6fff53153166 service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Received unexpected event network-vif-plugged-d778d0d8-5cbc-478a-a9e8-73d16c874a6f for instance with vm_state building and task_state spawning. [ 874.394972] env[69994]: DEBUG oslo_concurrency.lockutils [req-d3d993ce-5c9b-485b-bd73-7996e10311d0 req-a29a7394-173c-45d0-8ab5-5e044f3baccd service nova] Releasing lock "refresh_cache-0b975ce0-40a4-48a9-a046-66227636d496" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.547652] env[69994]: DEBUG nova.network.neutron [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Successfully updated port: d778d0d8-5cbc-478a-a9e8-73d16c874a6f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.570364] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.707585] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf07591-4678-427d-847b-22b726c58365 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.715641] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbb2fee-6577-4ea3-b1f7-cf58bfde38e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.751366] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2902b3-6172-4c00-a5c1-a3547696e5d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.762616] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77f310b-d6c9-4ac6-9936-6c8a6cf745e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.766675] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241944, 'name': PowerOffVM_Task, 'duration_secs': 0.259899} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.767039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.767773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.768916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8841801c-4757-4391-8e6a-611cf84d235d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.779346] env[69994]: DEBUG nova.compute.provider_tree [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.787243] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.787831] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb64ddbf-2840-4744-be26-e6f9dba8352e {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.793713] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5268253a-d324-a560-2d2a-1303a61dd4a0, 'name': SearchDatastore_Task, 'duration_secs': 0.033616} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.794737] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba74a1c9-f69a-435a-a374-eea75942d8a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.799945] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 874.799945] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52469fc4-9be6-b967-e114-d1a6ab0c524d" [ 874.799945] env[69994]: _type = "Task" [ 874.799945] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.807890] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52469fc4-9be6-b967-e114-d1a6ab0c524d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.818817] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.819039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.819221] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Deleting the datastore file [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.819799] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ec37425-1906-4ce9-bded-cf7282215523 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.826503] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 874.826503] env[69994]: value = "task-3241946" [ 874.826503] env[69994]: _type = "Task" [ 874.826503] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.835713] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.050986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "refresh_cache-7ef329a2-4d61-428a-8a43-f309a1e953d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.051107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "refresh_cache-7ef329a2-4d61-428a-8a43-f309a1e953d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.051275] env[69994]: DEBUG nova.network.neutron [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.069856] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task} progress is 98%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.283139] env[69994]: DEBUG nova.scheduler.client.report [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.312773] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52469fc4-9be6-b967-e114-d1a6ab0c524d, 'name': SearchDatastore_Task, 'duration_secs': 0.010722} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.313083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.313342] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0b975ce0-40a4-48a9-a046-66227636d496/0b975ce0-40a4-48a9-a046-66227636d496.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.313623] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fb81475-53b0-41ea-bad9-92408911de85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.320296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "80705dfe-4768-4f35-8acf-316b15814f78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.320540] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "80705dfe-4768-4f35-8acf-316b15814f78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.320785] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 875.320785] env[69994]: value = "task-3241947" [ 875.320785] env[69994]: _type = "Task" [ 875.320785] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.329206] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241947, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.336396] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133657} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.336621] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.336835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.336977] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.572429] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241942, 'name': RelocateVM_Task, 'duration_secs': 3.604835} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.572717] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 875.572923] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647869', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'name': 'volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '25a64898-568e-4095-aace-f8a564cdf916', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'serial': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 875.574403] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125c48df-71da-44b0-af92-ec706fc191ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.593641] env[69994]: DEBUG nova.network.neutron [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.596227] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47805087-c8e9-4e80-83f3-ba040dbe4e8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.620559] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67/volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.623325] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b317bd9a-11c1-4509-92bd-e6f2ddb226a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.646547] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 875.646547] env[69994]: value = "task-3241948" [ 875.646547] env[69994]: _type = "Task" [ 875.646547] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.657800] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241948, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.788717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.105s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.791936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.958s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.792840] env[69994]: INFO nova.compute.claims [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.822155] env[69994]: INFO nova.scheduler.client.report [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Deleted allocations for instance 1d548f54-4ffa-4299-9212-717350558ad4 [ 875.833459] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241947, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461741} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.833719] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0b975ce0-40a4-48a9-a046-66227636d496/0b975ce0-40a4-48a9-a046-66227636d496.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 875.833937] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.834205] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73f2dd1c-f21e-449c-97bc-37c7f62815fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.845070] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 875.845070] env[69994]: value = "task-3241949" [ 875.845070] env[69994]: _type = "Task" [ 875.845070] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.855094] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241949, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.856058] env[69994]: DEBUG nova.network.neutron [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Updating instance_info_cache with network_info: [{"id": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f", "address": "fa:16:3e:7b:7d:46", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd778d0d8-5c", "ovs_interfaceid": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.157884] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241948, 'name': ReconfigVM_Task} progress is 14%. 
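The instance_info_cache update above carries the full Neutron network_info for port d778d0d8-5cbc-478a-a9e8-73d16c874a6f. A short sketch of walking that structure to recover the fixed IPs; the nested dict is abridged from the log entry itself:

# Abridged from the network_info shown in the log entry above.
network_info = [{
    "id": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f",
    "address": "fa:16:3e:7b:7d:46",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6", "type": "fixed"}],
        }],
    },
    "devname": "tapd778d0d8-5c",
    "ovs_interfaceid": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f",
}]

def fixed_ips(vifs):
    """Yield (mac, ip) for every fixed IP carried in a network_info list."""
    for vif in vifs:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                if ip.get("type") == "fixed":
                    yield vif["address"], ip["address"]

print(list(fixed_ips(network_info)))
# [('fa:16:3e:7b:7d:46', '192.168.128.6')]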
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.333963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-400fdda3-6f30-4f28-abdc-76cdfe551237 tempest-FloatingIPsAssociationTestJSON-1696154049 tempest-FloatingIPsAssociationTestJSON-1696154049-project-member] Lock "1d548f54-4ffa-4299-9212-717350558ad4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.603s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.360819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "refresh_cache-7ef329a2-4d61-428a-8a43-f309a1e953d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.360819] env[69994]: DEBUG nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Instance network_info: |[{"id": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f", "address": "fa:16:3e:7b:7d:46", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd778d0d8-5c", "ovs_interfaceid": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 876.360819] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070117} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.361240] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:7d:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd778d0d8-5cbc-478a-a9e8-73d16c874a6f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.369298] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.369608] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 876.372299] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.373070] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ac8ced-b928-407b-80d0-ded3b145490c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.376155] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37789bf6-85cd-4f80-86ac-1e1d67fc7a27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.394816] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 876.395162] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 
tempest-ServersListShow2100Test-1567668183-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.395418] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 876.395692] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.395949] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 876.396214] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 876.396478] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 876.396720] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 876.396974] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 876.397262] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 876.397544] env[69994]: DEBUG nova.virt.hardware [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 876.399454] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a4a70c-b3cf-4e28-9e61-d6973ad19815 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.424401] env[69994]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 0b975ce0-40a4-48a9-a046-66227636d496/0b975ce0-40a4-48a9-a046-66227636d496.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 876.427929] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b096888-1617-4436-afc6-746fb60ee956 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.444553] env[69994]: DEBUG nova.compute.manager [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Received event network-changed-d778d0d8-5cbc-478a-a9e8-73d16c874a6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 876.444937] env[69994]: DEBUG nova.compute.manager [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Refreshing instance network info cache due to event network-changed-d778d0d8-5cbc-478a-a9e8-73d16c874a6f. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 876.445250] env[69994]: DEBUG oslo_concurrency.lockutils [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] Acquiring lock "refresh_cache-7ef329a2-4d61-428a-8a43-f309a1e953d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.445489] env[69994]: DEBUG oslo_concurrency.lockutils [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] Acquired lock "refresh_cache-7ef329a2-4d61-428a-8a43-f309a1e953d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.445765] env[69994]: DEBUG nova.network.neutron [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Refreshing network info cache for port d778d0d8-5cbc-478a-a9e8-73d16c874a6f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.447160] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.447160] env[69994]: value = "task-3241950" [ 876.447160] env[69994]: _type = "Task" [ 876.447160] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.454687] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92976dfb-8563-4e26-8b09-86d02aca3dee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.461249] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 876.461249] env[69994]: value = "task-3241951" [ 876.461249] env[69994]: _type = "Task" [ 876.461249] env[69994]: } to complete. 
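The nova.virt.hardware entries a little earlier (Flavor limits 0:0:0 through Sorted desired topologies) pick a CPU topology for the 1-vCPU m1.nano flavor: with no explicit flavor or image limits and a 65536 ceiling on sockets, cores and threads, the only split whose product equals 1 is 1x1x1. A simplified sketch of that enumeration, assuming only the product constraint (Nova's real _get_possible_cpu_topologies applies further NUMA and image constraints):

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate socket/core/thread splits whose product equals vcpus."""
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)

# One vCPU leaves a single candidate, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entry.
print(list(possible_topologies(1)))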
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.479764] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.485755] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.485994] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241950, 'name': CreateVM_Task} progress is 15%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.487051] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.487314] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be2cb595-562e-45e3-aa6b-633f5dc8d5ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.503389] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241951, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.507551] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.507551] env[69994]: value = "task-3241952" [ 876.507551] env[69994]: _type = "Task" [ 876.507551] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.515198] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241952, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.659178] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241948, 'name': ReconfigVM_Task, 'duration_secs': 0.575674} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.659415] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Reconfigured VM instance instance-0000003d to attach disk [datastore1] volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67/volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.664593] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f03380aa-f9f2-434b-b244-516f127b3533 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.680169] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 876.680169] env[69994]: value = "task-3241953" [ 876.680169] env[69994]: _type = "Task" [ 876.680169] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.688611] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241953, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.965268] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241950, 'name': CreateVM_Task, 'duration_secs': 0.509091} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.970482] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 876.975125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.975125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.975125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 876.975962] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dc8e174-4e3d-47cc-9866-ed4b96b74f7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.985771] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241951, 'name': ReconfigVM_Task, 'duration_secs': 0.402919} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.990502] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 0b975ce0-40a4-48a9-a046-66227636d496/0b975ce0-40a4-48a9-a046-66227636d496.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.990926] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b4d83ba-1cc5-452a-aea3-fbe6fbd1526c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.996295] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 876.996295] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dec44a-eb3e-99db-4f2e-9228acd9aee4" [ 876.996295] env[69994]: _type = "Task" [ 876.996295] env[69994]: } to complete. 
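The lock entries above serialize access to the cached image: a lock named after the [datastore1] devstack-image-cache_base/... VMDK path is acquired before the cached disk is inspected or copied and released afterwards. A minimal sketch of that pattern with oslo.concurrency, assuming a placeholder copy_virtual_disk callable for the step that issues CopyVirtualDisk_Task:

from oslo_concurrency import lockutils

# Lock name copied from the log; the copy step itself is a placeholder.
IMAGE_LOCK = ("[datastore1] devstack-image-cache_base/"
              "cc2e14cc-b12f-480a-a387-dd21e9efda8b/"
              "cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk")

def copy_cached_image(copy_virtual_disk, source, destination):
    """Copy the cached image while holding the per-image lock.

    copy_virtual_disk is a placeholder for whatever issues the
    CopyVirtualDisk_Task; only the locking pattern is the point here.
    """
    # Serializes concurrent callers the same way the Acquiring/Acquired/
    # Releasing lock entries above do; released even if the copy fails.
    with lockutils.lock(IMAGE_LOCK):
        return copy_virtual_disk(source, destination)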
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.001816] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 877.001816] env[69994]: value = "task-3241954" [ 877.001816] env[69994]: _type = "Task" [ 877.001816] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.013169] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dec44a-eb3e-99db-4f2e-9228acd9aee4, 'name': SearchDatastore_Task, 'duration_secs': 0.01033} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.017212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.017487] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.017797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.017864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.018036] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.024060] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f75851b-75d2-463f-88a8-de77758de5c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.025845] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': 
task-3241954, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.031771] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241952, 'name': CreateVM_Task, 'duration_secs': 0.367407} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.031933] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.032376] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.032550] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.032859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 877.034024] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59d2a12d-40c1-4920-b637-852f672b8384 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.036239] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.036416] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.040083] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-412f5a61-e9c1-4cd3-918a-ee6783736aa5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.046442] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 877.046442] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bde301-2f02-a989-174d-490c1f3d7011" [ 877.046442] env[69994]: _type = "Task" [ 877.046442] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.047773] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 877.047773] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5213fd88-804a-1b41-0fb8-464d47959c9e" [ 877.047773] env[69994]: _type = "Task" [ 877.047773] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.062543] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bde301-2f02-a989-174d-490c1f3d7011, 'name': SearchDatastore_Task, 'duration_secs': 0.011269} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.066394] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5213fd88-804a-1b41-0fb8-464d47959c9e, 'name': SearchDatastore_Task, 'duration_secs': 0.011431} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.066637] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-702f80f2-b0f2-4b6a-8757-80f7cdfefff8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.069260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.069498] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.069693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.075227] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 877.075227] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5250bc7a-272e-7d5f-d2b2-8927b9857f7b" [ 877.075227] env[69994]: _type = "Task" [ 877.075227] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.082876] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5250bc7a-272e-7d5f-d2b2-8927b9857f7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.196732] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241953, 'name': ReconfigVM_Task, 'duration_secs': 0.221857} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.197038] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647869', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'name': 'volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '25a64898-568e-4095-aace-f8a564cdf916', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'serial': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 877.197563] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c6e1a3c-5024-4775-bc71-f7f91532eb8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.207435] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 877.207435] env[69994]: value = "task-3241955" [ 877.207435] env[69994]: _type = "Task" [ 877.207435] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.216050] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241955, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.313779] env[69994]: DEBUG nova.network.neutron [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Updated VIF entry in instance network info cache for port d778d0d8-5cbc-478a-a9e8-73d16c874a6f. 
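The "Attached VMDK" entry above includes the Cinder connection_info that volumeops translates into the ReconfigVM_Task. A short sketch pulling out the fields needed for a vmdk attach; the dict is abridged from that log entry:

# Abridged from the "Attached VMDK" connection_info in the log entry above.
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-647869",
        "volume_id": "b7a10ba0-24cb-4e9b-a0d8-098524f6ac67",
        "name": "volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67",
        "access_mode": "rw",
        "encrypted": False,
    },
    "serial": "b7a10ba0-24cb-4e9b-a0d8-098524f6ac67",
}

def vmdk_attach_params(info):
    """Return (backing volume ref, volume id, read_only) for a vmdk attach."""
    if info["driver_volume_type"] != "vmdk":
        raise ValueError("unexpected volume type: %s" % info["driver_volume_type"])
    data = info["data"]
    return data["volume"], data["volume_id"], data.get("access_mode") == "ro"

print(vmdk_attach_params(connection_info))
# ('vm-647869', 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', False)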
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 877.314157] env[69994]: DEBUG nova.network.neutron [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Updating instance_info_cache with network_info: [{"id": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f", "address": "fa:16:3e:7b:7d:46", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd778d0d8-5c", "ovs_interfaceid": "d778d0d8-5cbc-478a-a9e8-73d16c874a6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.326152] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf35a97-3df6-4b06-b8c3-538e0ab22fb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.332345] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab14f755-b1a2-4326-b430-80705bc2716a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.365983] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6876fa46-25be-431b-86eb-48962f0f5f6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.373799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c16b2b9-bf25-4591-a5c2-d0946d3137fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.387933] env[69994]: DEBUG nova.compute.provider_tree [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.514587] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241954, 'name': Rename_Task, 'duration_secs': 0.146127} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.514852] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 877.515109] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b8e2cce-a454-41e1-8542-ab7efd41127e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.520938] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 877.520938] env[69994]: value = "task-3241956" [ 877.520938] env[69994]: _type = "Task" [ 877.520938] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.529331] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.585314] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5250bc7a-272e-7d5f-d2b2-8927b9857f7b, 'name': SearchDatastore_Task, 'duration_secs': 0.00988} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.585591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.585846] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 7ef329a2-4d61-428a-8a43-f309a1e953d6/7ef329a2-4d61-428a-8a43-f309a1e953d6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.586773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.586773] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.586773] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc7e26db-c401-42cd-b6af-bf053f11b7d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.589045] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e20a3df-41a5-4ea8-8128-7614d0bf2073 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.595183] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 877.595183] env[69994]: value = "task-3241957" [ 877.595183] env[69994]: _type = "Task" [ 877.595183] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.599144] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.599331] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.600456] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eaf8da8-cdfd-429e-8aa2-377b497f80f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.606599] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241957, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.610076] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 877.610076] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521299d9-4422-2c85-effa-a1f7ece497b0" [ 877.610076] env[69994]: _type = "Task" [ 877.610076] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.619901] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521299d9-4422-2c85-effa-a1f7ece497b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.716040] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241955, 'name': Rename_Task, 'duration_secs': 0.137101} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.716510] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 877.716510] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-244862d9-0db7-4678-8a58-5a49a741d563 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.722361] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 877.722361] env[69994]: value = "task-3241958" [ 877.722361] env[69994]: _type = "Task" [ 877.722361] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.731353] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241958, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.817721] env[69994]: DEBUG oslo_concurrency.lockutils [req-40990720-b3d8-41de-857c-2b9124a3d354 req-249e9fb6-8ea8-4553-bf66-37cf0b63803e service nova] Releasing lock "refresh_cache-7ef329a2-4d61-428a-8a43-f309a1e953d6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.898279] env[69994]: DEBUG nova.scheduler.client.report [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.031999] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241956, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.105082] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468943} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.105327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 7ef329a2-4d61-428a-8a43-f309a1e953d6/7ef329a2-4d61-428a-8a43-f309a1e953d6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.105539] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.105791] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33c2a612-a61d-4f5a-9063-ef04c1459abb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.112147] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 878.112147] env[69994]: value = "task-3241959" [ 878.112147] env[69994]: _type = "Task" [ 878.112147] env[69994]: } to complete. 
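The report client line above dumps the compute node's placement inventory: 48 VCPU at allocation_ratio 4.0, 196590 MB of RAM with 512 reserved, 400 GB of disk. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio; a short worked sketch using those figures:

# Inventory figures copied from the report line above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def usable_capacity(inv):
    """Capacity placement can allocate per class: (total - reserved) * ratio."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}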
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.123795] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521299d9-4422-2c85-effa-a1f7ece497b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009747} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.127722] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241959, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.127972] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1e72c08-32e1-4b1b-a85b-33da93a08b61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.133108] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 878.133108] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52877061-8f6e-a836-e489-05a824284ded" [ 878.133108] env[69994]: _type = "Task" [ 878.133108] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.141014] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52877061-8f6e-a836-e489-05a824284ded, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.235683] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241958, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.404360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.404985] env[69994]: DEBUG nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 878.408349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.199s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.410007] env[69994]: INFO nova.compute.claims [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.531616] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241956, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.625196] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241959, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087568} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.625466] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.626274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f361a6-432e-4dbf-9c18-d0c47eb12206 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.649638] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 7ef329a2-4d61-428a-8a43-f309a1e953d6/7ef329a2-4d61-428a-8a43-f309a1e953d6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.653066] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17995cd4-f058-47d5-8c83-f1bca116f143 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.673975] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52877061-8f6e-a836-e489-05a824284ded, 'name': SearchDatastore_Task, 'duration_secs': 0.009213} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.675332] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.675633] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.675954] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 878.675954] env[69994]: value = "task-3241960" [ 878.675954] env[69994]: _type = "Task" [ 878.675954] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.676176] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63b30450-7710-463c-9eac-d396745a9ddd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.685244] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 878.685244] env[69994]: value = "task-3241961" [ 878.685244] env[69994]: _type = "Task" [ 878.685244] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.688155] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241960, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.737470] env[69994]: DEBUG oslo_vmware.api [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3241958, 'name': PowerOnVM_Task, 'duration_secs': 0.549272} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.737822] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 878.738067] env[69994]: INFO nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Took 8.88 seconds to spawn the instance on the hypervisor. [ 878.738310] env[69994]: DEBUG nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 878.739233] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8d376b-663a-42d6-a503-9c573d8227e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.915750] env[69994]: DEBUG nova.compute.utils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 878.919598] env[69994]: DEBUG nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 878.919851] env[69994]: DEBUG nova.network.neutron [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.980836] env[69994]: DEBUG nova.policy [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '471a2a9e2b4a4d3da7935b67e87b0fe8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30a9ea2f804f49ec8c5c6861b507454e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 879.032176] env[69994]: DEBUG oslo_vmware.api [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241956, 'name': PowerOnVM_Task, 'duration_secs': 1.233559} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.032503] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.032503] env[69994]: INFO nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Took 8.23 seconds to spawn the instance on the hypervisor. [ 879.032597] env[69994]: DEBUG nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.033390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a27fb4-e22a-418a-b0ab-8fc274d926f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.189249] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241960, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.198470] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241961, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485592} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.200078] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.200078] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.200078] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5c4dafe-68e3-483c-9333-d828f3d78342 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.208851] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 879.208851] env[69994]: value = "task-3241962" [ 879.208851] env[69994]: _type = "Task" [ 879.208851] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.217460] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241962, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.266738] env[69994]: INFO nova.compute.manager [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Took 54.60 seconds to build instance. [ 879.425029] env[69994]: DEBUG nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 879.489656] env[69994]: DEBUG nova.network.neutron [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Successfully created port: 37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.554658] env[69994]: INFO nova.compute.manager [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Took 52.33 seconds to build instance. 
[ 879.693866] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241960, 'name': ReconfigVM_Task, 'duration_secs': 0.629765} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.696750] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 7ef329a2-4d61-428a-8a43-f309a1e953d6/7ef329a2-4d61-428a-8a43-f309a1e953d6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.697637] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b444ac0-e55c-4364-96a4-8611d796f7ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.704732] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 879.704732] env[69994]: value = "task-3241963" [ 879.704732] env[69994]: _type = "Task" [ 879.704732] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.718422] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241963, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.723174] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071202} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.723174] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.723303] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7c8c95-18b5-4906-b82c-242ecf61c94e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.746410] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.749304] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7236f00-d0a1-4182-8180-5baadb50cdbb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.769379] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d27b943f-a640-4f50-a54a-21736fa4b191 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "25a64898-568e-4095-aace-f8a564cdf916" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.674s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.769798] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 879.769798] env[69994]: value = "task-3241964" [ 879.769798] env[69994]: _type = "Task" [ 879.769798] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.786364] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241964, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.972445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061bdf75-52d3-4420-b222-10e3ad1d8856 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.982086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd86de1-3bed-43ec-9102-2cb6f62ee28a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.015010] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b1445e-2c84-4c27-b138-5cdc82fe81f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.023028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a80fab0-fa17-479b-99c0-1583476814d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.037612] env[69994]: DEBUG nova.compute.provider_tree [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.057674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-566b3d08-c7b3-487e-9538-7a97e9191856 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.743s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.215040] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241963, 'name': Rename_Task, 'duration_secs': 0.473368} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.218019] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.218019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45a3d36c-e4f0-4d68-be74-94404178dcac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.221750] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 880.221750] env[69994]: value = "task-3241965" [ 880.221750] env[69994]: _type = "Task" [ 880.221750] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.230555] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.263383] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "17389887-5463-44e1-b1c0-f123d8dedec7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.263713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "17389887-5463-44e1-b1c0-f123d8dedec7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.277532] env[69994]: DEBUG nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 880.288482] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241964, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.440523] env[69994]: DEBUG nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 880.477337] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.477442] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.477574] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.478417] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.478417] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.478417] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.478417] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.479539] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 880.479539] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.479539] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.479539] env[69994]: DEBUG nova.virt.hardware [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.480062] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a1b2d5-e329-45ae-97f8-82912d1384a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.488945] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2d20a6-35da-44e4-9918-97f8a3d1b06b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.541703] env[69994]: DEBUG nova.scheduler.client.report [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.733069] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241965, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.766335] env[69994]: DEBUG nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 880.785087] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241964, 'name': ReconfigVM_Task, 'duration_secs': 0.85235} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.785462] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Reconfigured VM instance instance-0000003c to attach disk [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90/f3268fe1-768c-4d27-828a-5885ce166f90.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.789040] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0b912de-90d5-4013-82ab-091f8a691534 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.795900] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 880.795900] env[69994]: value = "task-3241966" [ 880.795900] env[69994]: _type = "Task" [ 880.795900] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.814908] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241966, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.822465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.047708] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.048599] env[69994]: DEBUG nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 881.053461] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.498s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.053829] env[69994]: DEBUG nova.objects.instance [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lazy-loading 'resources' on Instance uuid 2d812174-d2ad-4fac-8ae5-ffa51d691374 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.215110] env[69994]: DEBUG nova.compute.manager [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Received event network-changed-738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 881.215110] env[69994]: DEBUG nova.compute.manager [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Refreshing instance network info cache due to event network-changed-738bec83-fa63-41a5-899f-73cdd8bec4ba. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 881.215110] env[69994]: DEBUG oslo_concurrency.lockutils [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] Acquiring lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.215480] env[69994]: DEBUG oslo_concurrency.lockutils [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] Acquired lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.215936] env[69994]: DEBUG nova.network.neutron [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Refreshing network info cache for port 738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.234340] env[69994]: DEBUG oslo_vmware.api [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241965, 'name': PowerOnVM_Task, 'duration_secs': 0.732012} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.234669] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.234916] env[69994]: INFO nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Took 7.75 seconds to spawn the instance on the hypervisor. [ 881.235178] env[69994]: DEBUG nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.236461] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d97b7c-ac72-4d3e-9ea2-598741f8dd35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.289660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.305741] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241966, 'name': Rename_Task, 'duration_secs': 0.199154} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.306763] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 881.307209] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4ac30e8-2090-48e8-b024-833dfe9d8d3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.310683] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "0b975ce0-40a4-48a9-a046-66227636d496" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.310913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.317689] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 881.317689] env[69994]: value = "task-3241967" [ 881.317689] env[69994]: _type = "Task" [ 881.317689] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.329407] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.416376] env[69994]: DEBUG nova.network.neutron [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Successfully updated port: 37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.557167] env[69994]: DEBUG nova.compute.utils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 881.561603] env[69994]: DEBUG nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 881.561785] env[69994]: DEBUG nova.network.neutron [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.613278] env[69994]: DEBUG nova.policy [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ccfdf4c5e604bb3a5eca0ac5727774c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5acf9a4a9344d4c9c91b75e83cf7a76', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 881.754977] env[69994]: INFO nova.compute.manager [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Took 49.77 seconds to build instance. [ 881.815096] env[69994]: DEBUG nova.compute.utils [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 881.827831] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241967, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.925921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.926104] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.926268] env[69994]: DEBUG nova.network.neutron [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.965942] env[69994]: DEBUG nova.network.neutron [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updated VIF entry in instance network info cache for port 738bec83-fa63-41a5-899f-73cdd8bec4ba. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 881.966335] env[69994]: DEBUG nova.network.neutron [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updating instance_info_cache with network_info: [{"id": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "address": "fa:16:3e:70:ee:d0", "network": {"id": "1a5f9db3-2ded-4798-946a-cd79d7da5ae0", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-463906691-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da51434b3d004f1681f76b87d177bd84", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738bec83-fa", "ovs_interfaceid": "738bec83-fa63-41a5-899f-73cdd8bec4ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.007285] env[69994]: DEBUG nova.network.neutron [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Successfully created port: 
d2919329-57fe-4483-b8d9-754310db51d9 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.065159] env[69994]: DEBUG nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 882.069259] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b8a328-81d3-4a32-bf73-a1fc52e2ab9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.079158] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2543e06c-68d7-41b2-a1ea-65d5cf0e5e7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.114984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f131a7d1-dac1-4842-848d-b76d8a1eeccd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.123584] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4299f8c5-7f64-4fd8-9bfa-6a2e0bd1ff39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.138445] env[69994]: DEBUG nova.compute.provider_tree [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.257046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4ff9fb83-2761-48a5-a01a-095f5edc2d4b tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.906s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.317861] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.329188] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241967, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.465338] env[69994]: DEBUG nova.network.neutron [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.468853] env[69994]: DEBUG oslo_concurrency.lockutils [req-8293db3c-1d35-4cdf-8e0a-2d620f30333c req-b818e1c7-44ce-4666-a2d8-32a45ffad665 service nova] Releasing lock "refresh_cache-25a64898-568e-4095-aace-f8a564cdf916" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.608772] env[69994]: DEBUG nova.network.neutron [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.641538] env[69994]: DEBUG nova.scheduler.client.report [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.829742] env[69994]: DEBUG oslo_vmware.api [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241967, 'name': PowerOnVM_Task, 'duration_secs': 1.092632} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.829918] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.830280] env[69994]: DEBUG nova.compute.manager [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.831483] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48bc8fb-9655-4db3-9257-41c5004e7256 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.080526] env[69994]: DEBUG nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 883.109436] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 883.109681] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.109837] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.110027] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.110183] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 
tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.110332] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 883.110540] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 883.110698] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 883.111652] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 883.111652] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 883.111652] env[69994]: DEBUG nova.virt.hardware [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 883.112814] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002885b1-32da-46f6-a7f0-99dfd0959890 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.114913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.115151] env[69994]: DEBUG nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Instance network_info: |[{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 883.115808] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:78:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37af0480-c14f-4941-b963-b25c22c833b3', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.123031] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.123642] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.123863] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-816abe81-db54-4a13-862e-9e32bc3b08cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.142915] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bb5e47-dab6-4403-9535-507ea2bb90d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.148166] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.095s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.149954] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.149954] env[69994]: value = "task-3241968" [ 883.149954] env[69994]: _type = "Task" [ 883.149954] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.150396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.990s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.151818] env[69994]: INFO nova.compute.claims [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.170485] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241968, 'name': CreateVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.192774] env[69994]: INFO nova.scheduler.client.report [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Deleted allocations for instance 2d812174-d2ad-4fac-8ae5-ffa51d691374 [ 883.244337] env[69994]: DEBUG nova.compute.manager [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Received event network-vif-plugged-37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.244563] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] Acquiring lock "ef410b09-8686-409e-8391-d50cd0e0df04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.244776] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] Lock "ef410b09-8686-409e-8391-d50cd0e0df04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.244934] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] Lock "ef410b09-8686-409e-8391-d50cd0e0df04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.245123] env[69994]: DEBUG nova.compute.manager [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] No waiting events found dispatching network-vif-plugged-37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 883.245292] env[69994]: WARNING nova.compute.manager [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Received unexpected event 
network-vif-plugged-37af0480-c14f-4941-b963-b25c22c833b3 for instance with vm_state building and task_state spawning. [ 883.245449] env[69994]: DEBUG nova.compute.manager [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Received event network-changed-37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.245602] env[69994]: DEBUG nova.compute.manager [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing instance network info cache due to event network-changed-37af0480-c14f-4941-b963-b25c22c833b3. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 883.245785] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] Acquiring lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.245918] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] Acquired lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.246090] env[69994]: DEBUG nova.network.neutron [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.349365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.439191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "0b975ce0-40a4-48a9-a046-66227636d496" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.439191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.439191] env[69994]: INFO nova.compute.manager [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Attaching volume ced1da45-c861-43e5-861a-08efce363437 to /dev/sdb [ 883.453726] env[69994]: DEBUG oslo_concurrency.lockutils 
[None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "7ef329a2-4d61-428a-8a43-f309a1e953d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.456272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.456272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "7ef329a2-4d61-428a-8a43-f309a1e953d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.456272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.456272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.461393] env[69994]: INFO nova.compute.manager [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Terminating instance [ 883.490027] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4821a144-a07b-46ab-9a0e-dc72dce64115 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.496567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0594c27-b3b7-4cdf-bc16-587ce1a45e23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.512137] env[69994]: DEBUG nova.virt.block_device [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Updating existing volume attachment record: a91e6d64-0a8c-4af5-8a70-9b8983dadf68 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 883.679220] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-3241968, 'name': CreateVM_Task, 'duration_secs': 0.319473} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.680249] env[69994]: DEBUG nova.network.neutron [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Successfully updated port: d2919329-57fe-4483-b8d9-754310db51d9 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.681602] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.683041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.683041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.683284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 883.683538] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c77a97e-8826-47a6-b3ef-1fc704176410 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.688779] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 883.688779] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52671dec-e4d6-bde5-6781-3ab27b06850e" [ 883.688779] env[69994]: _type = "Task" [ 883.688779] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.697868] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52671dec-e4d6-bde5-6781-3ab27b06850e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.703465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7dee3b5f-b513-4e81-8aff-bea9bafd7982 tempest-ServerMetadataNegativeTestJSON-1374838907 tempest-ServerMetadataNegativeTestJSON-1374838907-project-member] Lock "2d812174-d2ad-4fac-8ae5-ffa51d691374" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.032s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.970068] env[69994]: DEBUG nova.compute.manager [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.970752] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.973138] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b642a3ce-d30f-4cb3-a7eb-4771e1c97af8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.987199] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.987199] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b21d78cf-2266-48e0-ae8f-3891eea1eae6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.998122] env[69994]: DEBUG oslo_vmware.api [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 883.998122] env[69994]: value = "task-3241972" [ 883.998122] env[69994]: _type = "Task" [ 883.998122] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.008388] env[69994]: DEBUG oslo_vmware.api [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.012551] env[69994]: DEBUG nova.network.neutron [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updated VIF entry in instance network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.013102] env[69994]: DEBUG nova.network.neutron [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.183626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.183932] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.186312] env[69994]: DEBUG nova.network.neutron [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.203046] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52671dec-e4d6-bde5-6781-3ab27b06850e, 'name': SearchDatastore_Task, 'duration_secs': 0.010214} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.203046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.203046] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.203046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.203046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.203046] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.203736] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03df7eeb-a7f9-4e74-85b2-10f1c6497f2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.214688] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.214889] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.215773] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84eee878-89a0-4442-94a6-e508ad12b512 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.228463] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 884.228463] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525ba105-6797-ecd9-4c0f-ff86585fa729" [ 884.228463] env[69994]: _type = "Task" [ 884.228463] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.237109] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525ba105-6797-ecd9-4c0f-ff86585fa729, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.357835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "f3268fe1-768c-4d27-828a-5885ce166f90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.358068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "f3268fe1-768c-4d27-828a-5885ce166f90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.358271] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "f3268fe1-768c-4d27-828a-5885ce166f90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.360021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "f3268fe1-768c-4d27-828a-5885ce166f90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.360021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "f3268fe1-768c-4d27-828a-5885ce166f90-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.361103] env[69994]: INFO nova.compute.manager [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Terminating instance [ 884.509090] env[69994]: DEBUG oslo_vmware.api [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241972, 'name': PowerOffVM_Task, 'duration_secs': 0.236323} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.512188] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.512483] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.513857] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1453c12-3e3d-4f7a-b898-56668eb9510d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.515706] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fc2432f-21e4-4882-8a33-06f8c31b694f req-35c8a00a-a849-44ea-842a-ff050329eb0d service nova] Releasing lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.578080] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.578309] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.578492] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleting the datastore file [datastore1] 7ef329a2-4d61-428a-8a43-f309a1e953d6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.578761] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1e8b4ef-04a1-49e3-83de-c1450e97abf6 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.585063] env[69994]: DEBUG oslo_vmware.api [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 884.585063] env[69994]: value = "task-3241974" [ 884.585063] env[69994]: _type = "Task" [ 884.585063] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.593621] env[69994]: DEBUG oslo_vmware.api [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.719146] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ea955e-96c1-4b90-8650-02f3bacd850f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.728240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e39b23f-c2e0-4a75-ba71-05a5d4e616e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.767476] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525ba105-6797-ecd9-4c0f-ff86585fa729, 'name': SearchDatastore_Task, 'duration_secs': 0.009235} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.768652] env[69994]: DEBUG nova.network.neutron [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.771939] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a860010-816b-402a-88c7-8d07d3fd30b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.776244] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-418f5171-3a0c-4aeb-b8c8-3f5b1b0484c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.789905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19160ed8-23da-4ad1-a1ea-4ae6899a66c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.791822] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 884.791822] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5222806e-cf37-627e-cf7e-95989a31d23e" [ 884.791822] env[69994]: _type = "Task" [ 884.791822] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.809193] env[69994]: DEBUG nova.compute.provider_tree [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.816489] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5222806e-cf37-627e-cf7e-95989a31d23e, 'name': SearchDatastore_Task, 'duration_secs': 0.009287} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.816667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.816963] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/ef410b09-8686-409e-8391-d50cd0e0df04.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 884.817298] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43ffab03-d455-4073-a7f7-b363e81419cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.823960] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 884.823960] env[69994]: value = "task-3241975" [ 884.823960] env[69994]: _type = "Task" [ 884.823960] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.832868] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241975, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.870446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "refresh_cache-f3268fe1-768c-4d27-828a-5885ce166f90" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.870598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquired lock "refresh_cache-f3268fe1-768c-4d27-828a-5885ce166f90" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.870778] env[69994]: DEBUG nova.network.neutron [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.983168] env[69994]: DEBUG nova.network.neutron [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Updating instance_info_cache with network_info: [{"id": "d2919329-57fe-4483-b8d9-754310db51d9", "address": "fa:16:3e:2b:fc:69", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2919329-57", "ovs_interfaceid": "d2919329-57fe-4483-b8d9-754310db51d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.097860] env[69994]: DEBUG oslo_vmware.api [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3241974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147911} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.098409] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.098673] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.098915] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.099201] env[69994]: INFO nova.compute.manager [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 885.099538] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 885.099832] env[69994]: DEBUG nova.compute.manager [-] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 885.100121] env[69994]: DEBUG nova.network.neutron [-] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 885.321026] env[69994]: DEBUG nova.scheduler.client.report [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.343022] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241975, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496856} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.343531] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/ef410b09-8686-409e-8391-d50cd0e0df04.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 885.347016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 885.347016] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0c0bcba-7fd0-4f98-a16b-1e638a8dd9f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.353607] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 885.353607] env[69994]: value = "task-3241976" [ 885.353607] env[69994]: _type = "Task" [ 885.353607] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.361825] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241976, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.392930] env[69994]: DEBUG nova.network.neutron [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.465048] env[69994]: DEBUG nova.network.neutron [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.486868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.486868] env[69994]: DEBUG nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Instance network_info: |[{"id": "d2919329-57fe-4483-b8d9-754310db51d9", "address": "fa:16:3e:2b:fc:69", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2919329-57", "ovs_interfaceid": "d2919329-57fe-4483-b8d9-754310db51d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 885.486868] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:fc:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c66a277b-e3bf-43b8-a632-04fdd0720b91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2919329-57fe-4483-b8d9-754310db51d9', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 885.495301] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 885.495797] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 885.496045] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29e0bf2f-e236-4876-90a9-709eecac0c1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.523715] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 885.523715] env[69994]: value = "task-3241977" [ 885.523715] env[69994]: _type = "Task" [ 885.523715] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.531918] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241977, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.742622] env[69994]: DEBUG nova.compute.manager [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Received event network-vif-plugged-d2919329-57fe-4483-b8d9-754310db51d9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.742850] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] Acquiring lock "ff645ae7-940e-4842-8915-a96d36d08067-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.743110] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] Lock "ff645ae7-940e-4842-8915-a96d36d08067-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.743402] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] Lock "ff645ae7-940e-4842-8915-a96d36d08067-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.743598] env[69994]: DEBUG nova.compute.manager [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] No waiting events found dispatching network-vif-plugged-d2919329-57fe-4483-b8d9-754310db51d9 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.743772] env[69994]: WARNING nova.compute.manager [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Received unexpected event network-vif-plugged-d2919329-57fe-4483-b8d9-754310db51d9 for instance with vm_state building and task_state spawning. 
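Aside for readers of this trace: the paired "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" messages above come from oslo.concurrency's named-lock helper. Below is a minimal illustrative sketch of that logging pattern, assuming a simple per-name threading.Lock registry; it is not the oslo.concurrency source, and the lock/caller names are taken from the log only as example strings.

    # Hypothetical illustration of the lockutils-style messages seen in this log.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    def _get_lock(name):
        # One lock object per name, created lazily.
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())

    @contextmanager
    def logged_lock(name, caller):
        lock = _get_lock(name)
        t0 = time.monotonic()
        print(f'Acquiring lock "{name}" by "{caller}"')
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    if __name__ == "__main__":
        with logged_lock("ff645ae7-940e-4842-8915-a96d36d08067-events", "pop_instance_event"):
            time.sleep(0.01)  # stand-in for popping the waiting network-vif-plugged event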
[ 885.743932] env[69994]: DEBUG nova.compute.manager [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Received event network-changed-d2919329-57fe-4483-b8d9-754310db51d9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.744354] env[69994]: DEBUG nova.compute.manager [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Refreshing instance network info cache due to event network-changed-d2919329-57fe-4483-b8d9-754310db51d9. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 885.744571] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] Acquiring lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.744710] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] Acquired lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.744931] env[69994]: DEBUG nova.network.neutron [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Refreshing network info cache for port d2919329-57fe-4483-b8d9-754310db51d9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.835818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.836598] env[69994]: DEBUG nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 885.841506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.192s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.843952] env[69994]: DEBUG nova.objects.instance [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lazy-loading 'resources' on Instance uuid 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.863713] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.408707} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.864015] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.864807] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5c6f85-0c9f-4fb8-b939-8e5a50eccc8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.890642] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/ef410b09-8686-409e-8391-d50cd0e0df04.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.890866] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c4c6eee-473b-405b-b83a-fa158e69b805 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.911540] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 885.911540] env[69994]: value = "task-3241978" [ 885.911540] env[69994]: _type = "Task" [ 885.911540] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.921112] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241978, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.933377] env[69994]: DEBUG nova.network.neutron [-] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.967765] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Releasing lock "refresh_cache-f3268fe1-768c-4d27-828a-5885ce166f90" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.968274] env[69994]: DEBUG nova.compute.manager [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 885.968525] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 885.969552] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a8d47c-af8e-44a3-885e-205e72d11896 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.977412] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.977685] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69e600d2-e61b-4309-ab84-5343d60b0e7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.983935] env[69994]: DEBUG oslo_vmware.api [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 885.983935] env[69994]: value = "task-3241979" [ 885.983935] env[69994]: _type = "Task" [ 885.983935] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.993938] env[69994]: DEBUG oslo_vmware.api [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241979, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.035754] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241977, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.346773] env[69994]: DEBUG nova.compute.utils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.350316] env[69994]: DEBUG nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.350495] env[69994]: DEBUG nova.network.neutron [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.411362] env[69994]: DEBUG nova.policy [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '464e2e64f77042f69423965a2694b159', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95dff361679f4d3eb08daf6701c7ab82', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.427396] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241978, 'name': ReconfigVM_Task, 'duration_secs': 0.297849} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.430240] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfigured VM instance instance-00000040 to attach disk [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/ef410b09-8686-409e-8391-d50cd0e0df04.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.431757] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10625b3d-c7e9-4a0a-bdb9-b7dc5f0ac483 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.435440] env[69994]: INFO nova.compute.manager [-] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Took 1.34 seconds to deallocate network for instance. 
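The recurring "Waiting for the task ... / Task: {'id': ..., 'name': ...} progress is N% / completed successfully ... duration_secs" entries above are the driver polling vCenter tasks (ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task, and so on). A rough sketch of such a polling loop, under stated assumptions (FakeTask is an invented stand-in for a vCenter task handle; this is not the oslo.vmware wait_for_task implementation):

    import time

    class FakeTask:
        """Hypothetical stand-in for a vCenter task handle."""
        def __init__(self, task_id, name, steps=4):
            self.id, self.name, self._step, self._steps = task_id, name, 0, steps

        def poll(self):
            # Each poll advances the fake task; a real client would query the SOAP API.
            self._step = min(self._step + 1, self._steps)
            progress = int(100 * self._step / self._steps)
            state = "success" if self._step == self._steps else "running"
            return state, progress

    def wait_for_task(task, interval=0.5):
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            print(f"Task: {{'id': {task.id!r}, 'name': {task.name!r}}} progress is {progress}%.")
            if state == "success":
                duration = time.monotonic() - start
                print(f"Task {task.id} completed successfully in {duration:.3f}s")
                return
            if state == "error":
                raise RuntimeError(f"Task {task.id} failed")
            time.sleep(interval)

    if __name__ == "__main__":
        wait_for_task(FakeTask("task-3241976", "ExtendVirtualDisk_Task"))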
[ 886.438469] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 886.438469] env[69994]: value = "task-3241981" [ 886.438469] env[69994]: _type = "Task" [ 886.438469] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.451815] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241981, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.470441] env[69994]: DEBUG nova.network.neutron [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Updated VIF entry in instance network info cache for port d2919329-57fe-4483-b8d9-754310db51d9. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.470850] env[69994]: DEBUG nova.network.neutron [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Updating instance_info_cache with network_info: [{"id": "d2919329-57fe-4483-b8d9-754310db51d9", "address": "fa:16:3e:2b:fc:69", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2919329-57", "ovs_interfaceid": "d2919329-57fe-4483-b8d9-754310db51d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.500133] env[69994]: DEBUG oslo_vmware.api [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241979, 'name': PowerOffVM_Task, 'duration_secs': 0.176757} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.500445] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 886.501113] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 886.501381] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b04468bd-31fb-41df-9807-0a3c47b8bfc1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.524099] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 886.524365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 886.524549] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Deleting the datastore file [datastore1] f3268fe1-768c-4d27-828a-5885ce166f90 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 886.524824] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11adb15f-f166-4673-a5e0-7844946495e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.535445] env[69994]: DEBUG oslo_vmware.api [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for the task: (returnval){ [ 886.535445] env[69994]: value = "task-3241983" [ 886.535445] env[69994]: _type = "Task" [ 886.535445] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.542102] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241977, 'name': CreateVM_Task, 'duration_secs': 0.621549} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.542600] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 886.543444] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.543619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.543974] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 886.551497] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f921a416-f379-4ee3-a87b-53dd640c17c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.554638] env[69994]: DEBUG oslo_vmware.api [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241983, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.559679] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 886.559679] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52985957-063a-7220-af34-28086d87c2a8" [ 886.559679] env[69994]: _type = "Task" [ 886.559679] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.569588] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52985957-063a-7220-af34-28086d87c2a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.855420] env[69994]: DEBUG nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 886.910633] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f454960-a8d4-4042-bc58-3cfb33bb2a77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.918992] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bd9b57-7575-41e2-bd2b-2e2e8e824dcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.963447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.967384] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7270482b-27bd-481c-8b38-4c14be6a074a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.976597] env[69994]: DEBUG oslo_concurrency.lockutils [req-c3294ae1-a3dd-41d5-8147-76e03068640d req-4f3d0906-8dcd-4c8c-9792-7310048ad140 service nova] Releasing lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.976974] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241981, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.980139] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e09460-9d53-4971-afd1-f3d855bbff71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.993696] env[69994]: DEBUG nova.compute.provider_tree [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.044600] env[69994]: DEBUG oslo_vmware.api [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Task: {'id': task-3241983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1063} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.044850] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 887.045278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 887.045513] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 887.045693] env[69994]: INFO nova.compute.manager [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Took 1.08 seconds to destroy the instance on the hypervisor. [ 887.045954] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 887.047054] env[69994]: DEBUG nova.compute.manager [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 887.047054] env[69994]: DEBUG nova.network.neutron [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 887.063706] env[69994]: DEBUG nova.network.neutron [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.073269] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52985957-063a-7220-af34-28086d87c2a8, 'name': SearchDatastore_Task, 'duration_secs': 0.008817} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.073269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.073269] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.073269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.073269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.073269] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.074087] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4bc60b23-3cc0-4814-811c-a09c1fadebc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.090119] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.090194] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 887.090896] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59e6cae8-7a86-4584-b4eb-77826ff39ec6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.097286] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 887.097286] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ac322a-c286-62ba-8b43-d98adff1e318" [ 887.097286] env[69994]: _type = "Task" [ 887.097286] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.104775] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac322a-c286-62ba-8b43-d98adff1e318, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.154817] env[69994]: DEBUG nova.network.neutron [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Successfully created port: 772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.472726] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241981, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.501124] env[69994]: DEBUG nova.scheduler.client.report [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.566255] env[69994]: DEBUG nova.network.neutron [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.607507] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac322a-c286-62ba-8b43-d98adff1e318, 'name': SearchDatastore_Task, 'duration_secs': 0.00945} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.608308] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d746a2e-4f5d-4f07-814d-37b9636b8ebc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.613473] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 887.613473] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5257afb5-79f7-b0c7-006b-e7d12c70dc0e" [ 887.613473] env[69994]: _type = "Task" [ 887.613473] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.621154] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5257afb5-79f7-b0c7-006b-e7d12c70dc0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.771677] env[69994]: DEBUG nova.compute.manager [req-a219e132-a6ac-4a4a-9c99-48e622d91fa5 req-0bd70e3d-c6c9-4b59-8871-ce0e673ef219 service nova] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Received event network-vif-deleted-d778d0d8-5cbc-478a-a9e8-73d16c874a6f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.865541] env[69994]: DEBUG nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 887.891615] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 887.891947] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.892134] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 887.892322] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.892472] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 887.892626] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 887.892827] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 887.892985] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 887.893169] env[69994]: DEBUG nova.virt.hardware [None 
req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 887.893332] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 887.893501] env[69994]: DEBUG nova.virt.hardware [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 887.894353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91101fc6-236c-4737-b38e-019a235f2f2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.902948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b27509-3a7c-4521-a36d-80aad43d17bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.972043] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241981, 'name': Rename_Task, 'duration_secs': 1.146923} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.972312] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 887.972312] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d553d157-e4e4-4baf-a1d5-d5c372e8315a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.978570] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 887.978570] env[69994]: value = "task-3241984" [ 887.978570] env[69994]: _type = "Task" [ 887.978570] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.986198] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241984, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.006273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.008994] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.648s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.032699] env[69994]: INFO nova.scheduler.client.report [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Deleted allocations for instance 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8 [ 888.070375] env[69994]: INFO nova.compute.manager [-] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Took 1.02 seconds to deallocate network for instance. [ 888.124150] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5257afb5-79f7-b0c7-006b-e7d12c70dc0e, 'name': SearchDatastore_Task, 'duration_secs': 0.00921} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.124436] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.124696] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ff645ae7-940e-4842-8915-a96d36d08067/ff645ae7-940e-4842-8915-a96d36d08067.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 888.125285] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8d6a9b6-2a0e-4780-a3b2-c31473b704a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.131820] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 888.131820] env[69994]: value = "task-3241985" [ 888.131820] env[69994]: _type = "Task" [ 888.131820] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.139839] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241985, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.491908] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241984, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.515322] env[69994]: INFO nova.compute.claims [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.540015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-bc89c678-31d3-42e0-89c9-075507c387ab tempest-ServerPasswordTestJSON-1198855796 tempest-ServerPasswordTestJSON-1198855796-project-member] Lock "6d99c52e-8893-4ad7-8d8e-56bd8c9379b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.418s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.564590] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 888.564837] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647919', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'name': 'volume-ced1da45-c861-43e5-861a-08efce363437', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0b975ce0-40a4-48a9-a046-66227636d496', 'attached_at': '', 'detached_at': '', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'serial': 'ced1da45-c861-43e5-861a-08efce363437'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 888.565871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8e8a60-c007-4e26-b1bd-30525eea4bf6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.591229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.595120] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13570b45-4bbf-4257-a11f-3ac7a2c27d66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.641553] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] volume-ced1da45-c861-43e5-861a-08efce363437/volume-ced1da45-c861-43e5-861a-08efce363437.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.642320] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de600423-43d5-44cc-aadc-9b4da8884377 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.680051] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241985, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.681189] env[69994]: DEBUG oslo_vmware.api [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 888.681189] env[69994]: value = "task-3241986" [ 888.681189] env[69994]: _type = "Task" [ 888.681189] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.689152] env[69994]: DEBUG oslo_vmware.api [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241986, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.922796] env[69994]: DEBUG nova.network.neutron [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Successfully updated port: 772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.989120] env[69994]: DEBUG oslo_vmware.api [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3241984, 'name': PowerOnVM_Task, 'duration_secs': 0.584962} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.989431] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.989749] env[69994]: INFO nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Took 8.55 seconds to spawn the instance on the hypervisor. [ 888.989819] env[69994]: DEBUG nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.990576] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7068e5a9-5426-432a-b845-d75d6e95fd23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.025026] env[69994]: INFO nova.compute.resource_tracker [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating resource usage from migration a470e5c8-07de-40fb-a4de-5addff23af5a [ 889.151152] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573244} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.152159] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ff645ae7-940e-4842-8915-a96d36d08067/ff645ae7-940e-4842-8915-a96d36d08067.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 889.152159] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.152159] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d8c6f24-5052-441c-b018-116531221139 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.162103] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 889.162103] env[69994]: value = "task-3241987" [ 889.162103] env[69994]: _type = "Task" [ 889.162103] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.169902] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241987, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.196377] env[69994]: DEBUG oslo_vmware.api [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241986, 'name': ReconfigVM_Task, 'duration_secs': 0.442848} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.196662] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Reconfigured VM instance instance-0000003e to attach disk [datastore2] volume-ced1da45-c861-43e5-861a-08efce363437/volume-ced1da45-c861-43e5-861a-08efce363437.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.205485] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb0c0894-abc4-4672-b518-94c06fc7441a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.223109] env[69994]: DEBUG oslo_vmware.api [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 889.223109] env[69994]: value = "task-3241988" [ 889.223109] env[69994]: _type = "Task" [ 889.223109] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.233360] env[69994]: DEBUG oslo_vmware.api [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.425621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.425790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.425994] env[69994]: DEBUG nova.network.neutron [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.510428] env[69994]: INFO nova.compute.manager [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Took 53.71 seconds to build instance. 
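The entries above show the pattern used for every long-running vCenter operation in this log: the driver invokes a *_Task method (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) and then oslo.vmware's wait_for_task/_poll_task loop reports "progress is N%" until the task completes. The following is only a minimal sketch of that invoke-then-wait pattern using oslo.vmware directly; the vCenter host, credentials, and the VirtualMachine lookup are illustrative assumptions, not values taken from this log.

# Hedged sketch of the invoke-then-wait_for_task pattern visible above.
# Host, credentials and the VM lookup below are placeholders (assumptions).
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',      # assumed vCenter endpoint/credentials
    api_retry_count=10,                       # retries on transient API faults
    task_poll_interval=0.5)                   # drives the periodic "progress is N%" polling

# Find a VirtualMachine managed-object reference via the property collector
# (get_objects is oslo.vmware's helper; 100 is the page size).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Start a vCenter task and block until it finishes; wait_for_task raises if
# the task ends in an error state, mirroring the _poll_task entries above.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)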
[ 889.533691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7708b29-f4b7-422b-abfa-741ba783166c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.541388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b556971-06af-478d-b69e-7879d44a35de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.576425] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af6fdf9-ec62-4436-abdd-c75798a93dfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.584812] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f05305-0c88-489d-b9a9-c19acffef240 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.598493] env[69994]: DEBUG nova.compute.provider_tree [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.674657] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065185} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.674935] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 889.676085] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d52de80-d54e-472b-8f59-33de06e25c4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.698389] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] ff645ae7-940e-4842-8915-a96d36d08067/ff645ae7-940e-4842-8915-a96d36d08067.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.698474] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e806e43-bd06-4c29-bc27-5f168ff6419c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.720583] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 889.720583] env[69994]: value = 
"task-3241989" [ 889.720583] env[69994]: _type = "Task" [ 889.720583] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.732677] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241989, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.734846] env[69994]: DEBUG oslo_vmware.api [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241988, 'name': ReconfigVM_Task, 'duration_secs': 0.147789} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.735156] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647919', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'name': 'volume-ced1da45-c861-43e5-861a-08efce363437', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0b975ce0-40a4-48a9-a046-66227636d496', 'attached_at': '', 'detached_at': '', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'serial': 'ced1da45-c861-43e5-861a-08efce363437'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 889.862068] env[69994]: DEBUG nova.compute.manager [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Received event network-vif-plugged-772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.862283] env[69994]: DEBUG oslo_concurrency.lockutils [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] Acquiring lock "e3697388-4598-4dde-8c20-43fc7665083b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.862499] env[69994]: DEBUG oslo_concurrency.lockutils [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] Lock "e3697388-4598-4dde-8c20-43fc7665083b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.862668] env[69994]: DEBUG oslo_concurrency.lockutils [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] Lock "e3697388-4598-4dde-8c20-43fc7665083b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.862828] env[69994]: DEBUG nova.compute.manager [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] 
[instance: e3697388-4598-4dde-8c20-43fc7665083b] No waiting events found dispatching network-vif-plugged-772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 889.862989] env[69994]: WARNING nova.compute.manager [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Received unexpected event network-vif-plugged-772e74a8-0286-4533-b4ad-63afc1c9e6e4 for instance with vm_state building and task_state spawning. [ 889.863550] env[69994]: DEBUG nova.compute.manager [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Received event network-changed-772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.863550] env[69994]: DEBUG nova.compute.manager [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Refreshing instance network info cache due to event network-changed-772e74a8-0286-4533-b4ad-63afc1c9e6e4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 889.864249] env[69994]: DEBUG oslo_concurrency.lockutils [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] Acquiring lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.960016] env[69994]: DEBUG nova.network.neutron [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.012902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fcdca6d-92e5-4e30-ae40-330dc4a741d9 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.973s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.101955] env[69994]: DEBUG nova.scheduler.client.report [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 890.107788] env[69994]: DEBUG nova.network.neutron [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updating instance_info_cache with network_info: [{"id": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "address": "fa:16:3e:b9:e0:62", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772e74a8-02", "ovs_interfaceid": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.231481] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241989, 'name': ReconfigVM_Task, 'duration_secs': 0.303927} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.231761] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Reconfigured VM instance instance-00000041 to attach disk [datastore2] ff645ae7-940e-4842-8915-a96d36d08067/ff645ae7-940e-4842-8915-a96d36d08067.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 890.232401] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c22d4461-99bf-4a88-8566-1b3d48f98542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.241518] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 890.241518] env[69994]: value = "task-3241990" [ 890.241518] env[69994]: _type = "Task" [ 890.241518] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.250219] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241990, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.610071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.599s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.610071] env[69994]: INFO nova.compute.manager [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Migrating [ 890.617186] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.444s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.618806] env[69994]: INFO nova.compute.claims [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.621987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Releasing lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.622378] 
env[69994]: DEBUG nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Instance network_info: |[{"id": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "address": "fa:16:3e:b9:e0:62", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772e74a8-02", "ovs_interfaceid": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.626254] env[69994]: DEBUG oslo_concurrency.lockutils [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] Acquired lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.626556] env[69994]: DEBUG nova.network.neutron [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Refreshing network info cache for port 772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.627807] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:e0:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d31a554-a94c-4471-892f-f65aa87b8279', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '772e74a8-0286-4533-b4ad-63afc1c9e6e4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.636665] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.643115] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.643346] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-090dcf36-a601-43a5-868a-4d09990c1e95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.666420] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.666420] env[69994]: value = "task-3241991" [ 890.666420] env[69994]: _type = "Task" [ 890.666420] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.677252] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241991, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.751928] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241990, 'name': Rename_Task, 'duration_secs': 0.154201} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.752594] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.752594] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad3cb6c6-7915-4264-90d0-d0766ef746a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.758949] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 890.758949] env[69994]: value = "task-3241992" [ 890.758949] env[69994]: _type = "Task" [ 890.758949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.767437] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241992, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.774028] env[69994]: DEBUG nova.objects.instance [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'flavor' on Instance uuid 0b975ce0-40a4-48a9-a046-66227636d496 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 890.816253] env[69994]: DEBUG nova.compute.manager [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Received event network-changed-37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.816447] env[69994]: DEBUG nova.compute.manager [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing instance network info cache due to event network-changed-37af0480-c14f-4941-b963-b25c22c833b3. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 890.816663] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] Acquiring lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.816926] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] Acquired lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.816967] env[69994]: DEBUG nova.network.neutron [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.145439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.145679] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.146012] env[69994]: DEBUG nova.network.neutron [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.178226] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3241991, 'name': CreateVM_Task, 'duration_secs': 0.426622} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.180830] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.181589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.181783] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.182334] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 891.183031] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-379cb77c-a726-45ae-bac7-b0ccd8011971 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.187630] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 891.187630] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f942a7-2663-0aaa-4d47-c0a8da9a0532" [ 891.187630] env[69994]: _type = "Task" [ 891.187630] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.196236] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f942a7-2663-0aaa-4d47-c0a8da9a0532, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.269975] env[69994]: DEBUG oslo_vmware.api [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3241992, 'name': PowerOnVM_Task, 'duration_secs': 0.483089} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.270177] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.270383] env[69994]: INFO nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Took 8.19 seconds to spawn the instance on the hypervisor. [ 891.270577] env[69994]: DEBUG nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 891.271381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b64a57f-9fcb-48b2-be00-f9c09ca0e013 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.284934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95b45864-825b-4791-9876-d2fc96222cd2 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.847s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.429251] env[69994]: DEBUG nova.network.neutron [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updated VIF entry in instance network info cache for port 772e74a8-0286-4533-b4ad-63afc1c9e6e4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.429683] env[69994]: DEBUG nova.network.neutron [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updating instance_info_cache with network_info: [{"id": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "address": "fa:16:3e:b9:e0:62", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772e74a8-02", "ovs_interfaceid": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.568108] env[69994]: DEBUG nova.network.neutron [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updated VIF entry in instance network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.568744] env[69994]: DEBUG nova.network.neutron [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.699777] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f942a7-2663-0aaa-4d47-c0a8da9a0532, 'name': SearchDatastore_Task, 'duration_secs': 0.011945} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.699777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.700163] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.700202] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.700340] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.700521] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.700785] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66bb1759-e898-4163-b0f1-defbbf30878c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.712901] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.713092] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.713805] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7303480d-7dff-4836-8cdd-01576a1b1b3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.718647] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 891.718647] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52aa355b-7868-ff7d-0c7a-8773b5e744ee" [ 891.718647] env[69994]: _type = "Task" [ 891.718647] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.728303] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52aa355b-7868-ff7d-0c7a-8773b5e744ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.795234] env[69994]: INFO nova.compute.manager [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Took 53.61 seconds to build instance. [ 891.866261] env[69994]: DEBUG nova.network.neutron [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.932782] env[69994]: DEBUG oslo_concurrency.lockutils [req-7da1e6d7-f92e-46e0-aea1-bdb52794ff99 req-4c29a59d-c9a6-49c7-bc11-998e91f465d3 service nova] Releasing lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.989222] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "0b975ce0-40a4-48a9-a046-66227636d496" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.989636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.989863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "0b975ce0-40a4-48a9-a046-66227636d496-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.990075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.990260] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.994205] env[69994]: INFO nova.compute.manager [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Terminating instance [ 892.037228] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc722fc-0a9c-402d-941c-a5bed4eee8c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.045093] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f162a597-803f-4ccf-8453-425869baad2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.077300] env[69994]: DEBUG oslo_concurrency.lockutils [req-8e318f12-c136-40e6-9421-5094d1da9e1d req-9c6ef708-d95d-4e1a-aa1f-7df377e626b0 service nova] Releasing lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.078829] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfcfdd7-a39e-4857-a451-c79f2fb1e47a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.087476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4e17c4-d205-4242-9c37-d9092042dc0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.101618] env[69994]: DEBUG nova.compute.provider_tree [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.127092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] Acquiring lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.127250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] Acquired lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.127422] env[69994]: DEBUG nova.network.neutron [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.231320] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52aa355b-7868-ff7d-0c7a-8773b5e744ee, 'name': SearchDatastore_Task, 'duration_secs': 0.04761} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.232438] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e10703e-f6ae-435e-80d5-96b5310abcf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.237713] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 892.237713] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d3ac16-f83c-5934-a6e8-37eec51713d9" [ 892.237713] env[69994]: _type = "Task" [ 892.237713] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.245582] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d3ac16-f83c-5934-a6e8-37eec51713d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.297297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59a9a18d-df2e-49ea-858d-2abcc72ca781 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "ff645ae7-940e-4842-8915-a96d36d08067" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.540s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.372346] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.498465] env[69994]: DEBUG nova.compute.manager [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 892.498741] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.499056] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35579988-9a3d-4820-a554-abc08a3d40dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.507393] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 892.507393] env[69994]: value = "task-3241993" [ 892.507393] env[69994]: _type = "Task" [ 892.507393] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.515799] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241993, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.605079] env[69994]: DEBUG nova.scheduler.client.report [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.750197] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d3ac16-f83c-5934-a6e8-37eec51713d9, 'name': SearchDatastore_Task, 'duration_secs': 0.038015} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.750512] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.750779] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] e3697388-4598-4dde-8c20-43fc7665083b/e3697388-4598-4dde-8c20-43fc7665083b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.751042] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6ab9e60-cd23-404c-9727-ea4d7e689003 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.758591] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 892.758591] env[69994]: value = "task-3241994" [ 892.758591] env[69994]: _type = "Task" [ 892.758591] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.766677] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241994, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.852069] env[69994]: DEBUG nova.network.neutron [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Updating instance_info_cache with network_info: [{"id": "d2919329-57fe-4483-b8d9-754310db51d9", "address": "fa:16:3e:2b:fc:69", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2919329-57", "ovs_interfaceid": "d2919329-57fe-4483-b8d9-754310db51d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.019723] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241993, 'name': PowerOffVM_Task, 'duration_secs': 0.405706} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.020028] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.020284] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 893.020492] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647919', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'name': 'volume-ced1da45-c861-43e5-861a-08efce363437', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0b975ce0-40a4-48a9-a046-66227636d496', 'attached_at': '', 'detached_at': '', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'serial': 'ced1da45-c861-43e5-861a-08efce363437'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 893.021312] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94d6c3e-6d54-497b-93a3-7fd88c69ee4d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.763666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.147s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.764208] env[69994]: DEBUG nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 893.767275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] Releasing lock "refresh_cache-ff645ae7-940e-4842-8915-a96d36d08067" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.767518] env[69994]: DEBUG nova.compute.manager [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Inject network info {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 893.767930] env[69994]: DEBUG nova.compute.manager [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] [instance: ff645ae7-940e-4842-8915-a96d36d08067] network_info to inject: |[{"id": "d2919329-57fe-4483-b8d9-754310db51d9", "address": "fa:16:3e:2b:fc:69", "network": {"id": "dc86f717-1eb9-419c-aa6f-f5dd10804e22", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-830745446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5acf9a4a9344d4c9c91b75e83cf7a76", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c66a277b-e3bf-43b8-a632-04fdd0720b91", "external-id": "nsx-vlan-transportzone-665", "segmentation_id": 665, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2919329-57", "ovs_interfaceid": "d2919329-57fe-4483-b8d9-754310db51d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 893.772466] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Reconfiguring VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 893.775193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.993s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.778543] env[69994]: DEBUG nova.objects.instance [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lazy-loading 'resources' on Instance uuid ffe5f2c6-69e7-4bdb-80d1-b421b695e790 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.779738] env[69994]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ccfd5a7-5034-4c1d-bfef-a5639a84ee9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.789930] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d421e69b-d0e6-42ac-a3d8-fbd8cecec011 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.801633] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470523} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.803233] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01bdca0-a6f3-4a50-b743-48c0cef20dee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.805709] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] e3697388-4598-4dde-8c20-43fc7665083b/e3697388-4598-4dde-8c20-43fc7665083b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 893.805929] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.806250] env[69994]: DEBUG oslo_vmware.api [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] Waiting for the task: (returnval){ [ 893.806250] env[69994]: value = "task-3241995" [ 893.806250] env[69994]: _type = "Task" [ 893.806250] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.806675] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e033d4b-f9d0-4247-bded-1af712c1041f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.829191] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9168be0d-a19c-4abb-ac2c-2aeef132efaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.836282] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 893.836282] env[69994]: value = "task-3241996" [ 893.836282] env[69994]: _type = "Task" [ 893.836282] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.836468] env[69994]: DEBUG oslo_vmware.api [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] Task: {'id': task-3241995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.850413] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] The volume has not been displaced from its original location: [datastore2] volume-ced1da45-c861-43e5-861a-08efce363437/volume-ced1da45-c861-43e5-861a-08efce363437.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 893.855625] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Reconfiguring VM instance instance-0000003e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 893.856612] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f1b7afb-7d51-405e-b4eb-27419bc0e283 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.872719] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241996, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.877779] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 893.877779] env[69994]: value = "task-3241997" [ 893.877779] env[69994]: _type = "Task" [ 893.877779] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.885563] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241997, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.277148] env[69994]: DEBUG nova.compute.utils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 894.278694] env[69994]: DEBUG nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 894.278788] env[69994]: DEBUG nova.network.neutron [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.299975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adc5aad-aae7-483d-a289-8acf27da13db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.329896] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance '45a8dced-6c49-441c-92e2-ee323ed8753c' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 894.337270] env[69994]: DEBUG nova.policy [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cde99e8c6c5b479f82c5267140d2e6d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a8bdfb28b6d40708b65bc12a4bcbb7d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.349913] env[69994]: DEBUG oslo_vmware.api [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] Task: {'id': task-3241995, 'name': ReconfigVM_Task, 'duration_secs': 0.326482} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.352790] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67571fc5-7026-4a5b-a21e-08f7698c8c9c tempest-ServersAdminTestJSON-439489043 tempest-ServersAdminTestJSON-439489043-project-admin] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Reconfigured VM instance to set the machine id {{(pid=69994) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 894.353743] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241996, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.283607} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.353743] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.354090] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffaf94d8-c0a4-4f4c-98c2-a0941642683c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.378258] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] e3697388-4598-4dde-8c20-43fc7665083b/e3697388-4598-4dde-8c20-43fc7665083b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.381724] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d10b5bb8-66a7-4458-bacc-74b1f2e140e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.404912] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241997, 'name': ReconfigVM_Task, 'duration_secs': 0.411287} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.406531] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Reconfigured VM instance instance-0000003e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 894.411282] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 894.411282] env[69994]: value = "task-3241998" [ 894.411282] env[69994]: _type = "Task" [ 894.411282] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.413878] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2c5ce9f-0094-470f-b436-af02fbbe6c5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.434575] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241998, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.435987] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 894.435987] env[69994]: value = "task-3241999" [ 894.435987] env[69994]: _type = "Task" [ 894.435987] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.447024] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241999, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.531153] env[69994]: INFO nova.compute.manager [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Rebuilding instance [ 894.584788] env[69994]: DEBUG nova.compute.manager [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.585663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fee514-56bc-4fc4-9603-6abd5b07e954 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.670143] env[69994]: DEBUG nova.network.neutron [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Successfully created port: 596c8b01-0040-4d26-9668-1847a813bc88 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.782686] env[69994]: DEBUG nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 894.838995] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b568636-cf9a-4e64-b1cc-29fb16e77b7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.843568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.844226] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c5a7408-ea4c-4831-aa54-5103014aa1c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.848937] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c365cf3-6c85-40d5-b193-b100b63c18a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.852976] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 894.852976] env[69994]: value = "task-3242000" [ 894.852976] env[69994]: _type = "Task" [ 894.852976] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.886020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0544d6d-f6dd-4443-9c46-a638642b858a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.891462] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 894.891683] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance '45a8dced-6c49-441c-92e2-ee323ed8753c' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 894.898924] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d568b9a9-9c77-40a4-8e7b-4546b9f2aa8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.912508] env[69994]: DEBUG nova.compute.provider_tree [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.936026] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 
tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3241998, 'name': ReconfigVM_Task, 'duration_secs': 0.332475} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.936159] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Reconfigured VM instance instance-00000042 to attach disk [datastore2] e3697388-4598-4dde-8c20-43fc7665083b/e3697388-4598-4dde-8c20-43fc7665083b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.939690] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49f4a335-8542-4daa-bc2b-c08adc96da94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.946370] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3241999, 'name': ReconfigVM_Task, 'duration_secs': 0.282526} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.947504] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647919', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'name': 'volume-ced1da45-c861-43e5-861a-08efce363437', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0b975ce0-40a4-48a9-a046-66227636d496', 'attached_at': '', 'detached_at': '', 'volume_id': 'ced1da45-c861-43e5-861a-08efce363437', 'serial': 'ced1da45-c861-43e5-861a-08efce363437'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 894.947773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 894.948078] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 894.948078] env[69994]: value = "task-3242001" [ 894.948078] env[69994]: _type = "Task" [ 894.948078] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.948727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ad890c-f885-4110-973b-735e5bfd9001 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.957536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 894.960452] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c56333e3-2752-4897-a1de-0181a9f156fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.961734] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242001, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.401236] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 895.401499] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.401686] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 895.401891] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.402059] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 895.402232] env[69994]: DEBUG nova.virt.hardware [None 
req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 895.402439] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 895.402663] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 895.402784] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 895.403032] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 895.403200] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 895.408601] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d373d98b-1ae6-47d5-9ea4-8581bc081de8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.421017] env[69994]: DEBUG nova.scheduler.client.report [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.431144] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 895.431144] env[69994]: value = "task-3242003" [ 895.431144] env[69994]: _type = "Task" [ 895.431144] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.440173] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242003, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.463122] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242001, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.604416] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.604744] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25c8230c-8af8-41ea-a3e7-94e1386aab03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.611752] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 895.611752] env[69994]: value = "task-3242004" [ 895.611752] env[69994]: _type = "Task" [ 895.611752] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.624502] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242004, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.794270] env[69994]: DEBUG nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 895.822956] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 895.823266] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.823480] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 895.823675] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.823857] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 895.824058] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 895.824313] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 895.824486] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 895.824683] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 895.824872] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 895.825121] env[69994]: DEBUG nova.virt.hardware [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 895.826057] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90f371a-eb81-420d-8a86-5a984340992d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.834264] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8029e4-6a31-46ce-90c3-640f40798141 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.852697] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.852919] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.853114] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleting the datastore file [datastore1] 0b975ce0-40a4-48a9-a046-66227636d496 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.853381] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1db71e5a-94f6-4f31-a24c-3ac140e59678 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.858967] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 895.858967] env[69994]: value = "task-3242005" [ 895.858967] env[69994]: _type = "Task" [ 895.858967] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.866374] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.926487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.151s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.929664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.855s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.930121] env[69994]: DEBUG nova.objects.instance [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lazy-loading 'resources' on Instance uuid 214b3508-6fb9-455e-be6b-bd9f6902b7ae {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.943227] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242003, 'name': ReconfigVM_Task, 'duration_secs': 0.291493} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.944605] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance '45a8dced-6c49-441c-92e2-ee323ed8753c' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 895.964032] env[69994]: INFO nova.scheduler.client.report [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Deleted allocations for instance ffe5f2c6-69e7-4bdb-80d1-b421b695e790 [ 895.965010] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242001, 'name': Rename_Task, 'duration_secs': 0.885844} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.967592] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.968025] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-410bdfcd-eb75-467f-8a7e-b710dcd4a968 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.974501] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 895.974501] env[69994]: value = "task-3242006" [ 895.974501] env[69994]: _type = "Task" [ 895.974501] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.982879] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.071767] env[69994]: DEBUG nova.compute.manager [req-9afce13f-ac4c-44aa-bfcc-6089a90d434c req-470b868e-c7bf-4e64-a964-c45600237777 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Received event network-vif-plugged-596c8b01-0040-4d26-9668-1847a813bc88 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.072052] env[69994]: DEBUG oslo_concurrency.lockutils [req-9afce13f-ac4c-44aa-bfcc-6089a90d434c req-470b868e-c7bf-4e64-a964-c45600237777 service nova] Acquiring lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.072216] env[69994]: DEBUG oslo_concurrency.lockutils [req-9afce13f-ac4c-44aa-bfcc-6089a90d434c req-470b868e-c7bf-4e64-a964-c45600237777 service nova] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.072385] env[69994]: DEBUG oslo_concurrency.lockutils [req-9afce13f-ac4c-44aa-bfcc-6089a90d434c req-470b868e-c7bf-4e64-a964-c45600237777 service nova] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.072549] env[69994]: DEBUG nova.compute.manager [req-9afce13f-ac4c-44aa-bfcc-6089a90d434c req-470b868e-c7bf-4e64-a964-c45600237777 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] No waiting events found dispatching network-vif-plugged-596c8b01-0040-4d26-9668-1847a813bc88 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 896.072709] env[69994]: WARNING nova.compute.manager 
[req-9afce13f-ac4c-44aa-bfcc-6089a90d434c req-470b868e-c7bf-4e64-a964-c45600237777 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Received unexpected event network-vif-plugged-596c8b01-0040-4d26-9668-1847a813bc88 for instance with vm_state building and task_state spawning. [ 896.124284] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242004, 'name': PowerOffVM_Task, 'duration_secs': 0.268137} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.124284] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 896.124284] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 896.124284] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7110a0f5-f993-413a-81f6-949b83e5d683 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.129780] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 896.130648] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1af26353-3ae3-4593-a004-bd5689cc0620 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.175376] env[69994]: DEBUG nova.network.neutron [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Successfully updated port: 596c8b01-0040-4d26-9668-1847a813bc88 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.209242] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 896.209533] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 896.209777] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 
tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleting the datastore file [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 896.210088] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d778e9c-ad04-4a09-b943-df0a9cec4eae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.216783] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 896.216783] env[69994]: value = "task-3242008" [ 896.216783] env[69994]: _type = "Task" [ 896.216783] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.224803] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242008, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.369571] env[69994]: DEBUG oslo_vmware.api [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273227} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.369858] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.370075] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.370273] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.370485] env[69994]: INFO nova.compute.manager [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Took 3.87 seconds to destroy the instance on the hypervisor. [ 896.370754] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.370992] env[69994]: DEBUG nova.compute.manager [-] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 896.371138] env[69994]: DEBUG nova.network.neutron [-] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 896.451285] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 896.451580] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.451786] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 896.452018] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.452219] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 896.452405] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 896.452656] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 896.452849] env[69994]: DEBUG nova.virt.hardware [None 
req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 896.453073] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 896.453343] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 896.453563] env[69994]: DEBUG nova.virt.hardware [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 896.459623] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Reconfiguring VM instance instance-00000015 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 896.460267] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-240ce680-5435-47be-ae98-d55ec4bf11c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.485108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8b79b1eb-c143-483e-a9f2-4eb3cb520791 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "ffe5f2c6-69e7-4bdb-80d1-b421b695e790" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.248s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.492106] env[69994]: DEBUG oslo_vmware.api [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242006, 'name': PowerOnVM_Task, 'duration_secs': 0.513911} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.493337] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.493554] env[69994]: INFO nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Took 8.63 seconds to spawn the instance on the hypervisor. 
[ 896.493734] env[69994]: DEBUG nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 896.494082] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 896.494082] env[69994]: value = "task-3242009" [ 896.494082] env[69994]: _type = "Task" [ 896.494082] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.498477] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591f91da-96bf-4a85-b895-69bd1d9c3f8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.513589] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242009, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.681284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "refresh_cache-c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.681284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquired lock "refresh_cache-c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.681284] env[69994]: DEBUG nova.network.neutron [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.727544] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248505} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.730193] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.730410] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.730605] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.926456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838d534c-db2d-4291-a898-34fbba2bb4d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.934172] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b0b859-a89e-4a8a-91d8-5c27bd91c9e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.964145] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a8b3e6-7147-402b-91c7-c052abfbf3ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.971960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e23feb-63cb-45fa-8361-06f7b0f5d0d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.985330] env[69994]: DEBUG nova.compute.provider_tree [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.008317] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242009, 'name': ReconfigVM_Task, 'duration_secs': 0.264039} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.008589] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Reconfigured VM instance instance-00000015 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 897.009408] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b55f23-8e5e-4f18-9356-d77742cce67c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.039299] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 45a8dced-6c49-441c-92e2-ee323ed8753c/45a8dced-6c49-441c-92e2-ee323ed8753c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.041633] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b74a08c-dc35-4d0a-afd9-0603fad370a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.055539] env[69994]: INFO nova.compute.manager [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Took 48.91 seconds to build instance. [ 897.061693] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 897.061693] env[69994]: value = "task-3242011" [ 897.061693] env[69994]: _type = "Task" [ 897.061693] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.073820] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242011, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.188686] env[69994]: DEBUG nova.network.neutron [-] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.193719] env[69994]: DEBUG nova.compute.manager [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Received event network-changed-772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.193913] env[69994]: DEBUG nova.compute.manager [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Refreshing instance network info cache due to event network-changed-772e74a8-0286-4533-b4ad-63afc1c9e6e4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 897.194141] env[69994]: DEBUG oslo_concurrency.lockutils [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] Acquiring lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.194287] env[69994]: DEBUG oslo_concurrency.lockutils [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] Acquired lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.194443] env[69994]: DEBUG nova.network.neutron [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Refreshing network info cache for port 772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 897.222203] env[69994]: DEBUG nova.network.neutron [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.356091] env[69994]: DEBUG nova.network.neutron [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Updating instance_info_cache with network_info: [{"id": "596c8b01-0040-4d26-9668-1847a813bc88", "address": "fa:16:3e:32:8f:29", "network": {"id": "d5683c09-8a9c-4640-906c-f1f48a90bcae", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-377918742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a8bdfb28b6d40708b65bc12a4bcbb7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap596c8b01-00", "ovs_interfaceid": "596c8b01-0040-4d26-9668-1847a813bc88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.489588] env[69994]: DEBUG nova.scheduler.client.report [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 897.557603] env[69994]: DEBUG oslo_concurrency.lockutils [None req-49e03d9e-7cf3-4e63-ab70-44384c9a6724 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.643s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.571737] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.692325] env[69994]: INFO nova.compute.manager [-] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Took 1.32 seconds to deallocate network for instance. 
[ 897.774693] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 897.774948] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.775139] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.775324] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.775472] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 897.775616] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 897.775824] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 897.776109] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 897.776431] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 
tempest-ServersAdminTestJSON-1865879220-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 897.776714] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 897.777094] env[69994]: DEBUG nova.virt.hardware [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 897.778442] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f53d904-a3ad-42c4-8d5a-11dbb17584f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.787827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326d5b7e-0c8b-4453-8011-8bc4a2d7deff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.803424] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "e3697388-4598-4dde-8c20-43fc7665083b" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.803743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.804401] env[69994]: INFO nova.compute.manager [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Rebooting instance [ 897.805502] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:5e:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c66a277b-e3bf-43b8-a632-04fdd0720b91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7e8be98-685a-4d07-9440-e07af619b026', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.813615] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.814612] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.814836] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5bed03f-843e-4df2-ba74-b7b469a085b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.837029] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.837029] env[69994]: value = "task-3242012" [ 897.837029] env[69994]: _type = "Task" [ 897.837029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.846620] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242012, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.858568] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Releasing lock "refresh_cache-c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.859050] env[69994]: DEBUG nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Instance network_info: |[{"id": "596c8b01-0040-4d26-9668-1847a813bc88", "address": "fa:16:3e:32:8f:29", "network": {"id": "d5683c09-8a9c-4640-906c-f1f48a90bcae", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-377918742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a8bdfb28b6d40708b65bc12a4bcbb7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap596c8b01-00", "ovs_interfaceid": "596c8b01-0040-4d26-9668-1847a813bc88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 897.859473] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:8f:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '596c8b01-0040-4d26-9668-1847a813bc88', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.866600] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Creating folder: Project (5a8bdfb28b6d40708b65bc12a4bcbb7d). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 897.869659] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7dc7c674-26ec-4e57-9aca-67f57f580124 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.879902] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Created folder: Project (5a8bdfb28b6d40708b65bc12a4bcbb7d) in parent group-v647729. [ 897.880089] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Creating folder: Instances. Parent ref: group-v647923. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 897.880333] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45236663-859b-4bb7-ab0f-b3977045dfec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.889323] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Created folder: Instances in parent group-v647923. [ 897.889555] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.889856] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.889964] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0a9e1e7-ad11-46ff-b17e-067f06d4ecc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.912384] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.912384] env[69994]: value = "task-3242015" [ 897.912384] env[69994]: _type = "Task" [ 897.912384] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.920845] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242015, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.937444] env[69994]: DEBUG nova.network.neutron [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updated VIF entry in instance network info cache for port 772e74a8-0286-4533-b4ad-63afc1c9e6e4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.937880] env[69994]: DEBUG nova.network.neutron [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updating instance_info_cache with network_info: [{"id": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "address": "fa:16:3e:b9:e0:62", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772e74a8-02", "ovs_interfaceid": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.994735] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.066s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.997337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.328s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.998935] env[69994]: INFO nova.compute.claims [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.018523] env[69994]: INFO nova.scheduler.client.report [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleted allocations for instance 214b3508-6fb9-455e-be6b-bd9f6902b7ae [ 898.072703] env[69994]: DEBUG oslo_vmware.api [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 
tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242011, 'name': ReconfigVM_Task, 'duration_secs': 0.93691} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.072995] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 45a8dced-6c49-441c-92e2-ee323ed8753c/45a8dced-6c49-441c-92e2-ee323ed8753c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.073275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance '45a8dced-6c49-441c-92e2-ee323ed8753c' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 898.102215] env[69994]: DEBUG nova.compute.manager [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Received event network-changed-596c8b01-0040-4d26-9668-1847a813bc88 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 898.102467] env[69994]: DEBUG nova.compute.manager [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Refreshing instance network info cache due to event network-changed-596c8b01-0040-4d26-9668-1847a813bc88. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 898.102674] env[69994]: DEBUG oslo_concurrency.lockutils [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] Acquiring lock "refresh_cache-c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.102823] env[69994]: DEBUG oslo_concurrency.lockutils [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] Acquired lock "refresh_cache-c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.102982] env[69994]: DEBUG nova.network.neutron [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Refreshing network info cache for port 596c8b01-0040-4d26-9668-1847a813bc88 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.236834] env[69994]: INFO nova.compute.manager [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Took 0.54 seconds to detach 1 volumes for instance. 
[ 898.328950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.347374] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242012, 'name': CreateVM_Task, 'duration_secs': 0.372376} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.347374] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 898.347823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.347987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.348321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 898.348576] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cdd19c8-e070-4adf-8639-46094b9a6ec8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.353195] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 898.353195] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c04572-b040-f2d7-828d-f4798028a768" [ 898.353195] env[69994]: _type = "Task" [ 898.353195] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.360578] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c04572-b040-f2d7-828d-f4798028a768, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.424673] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242015, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.440626] env[69994]: DEBUG oslo_concurrency.lockutils [req-3e7dd114-7fce-4b32-9b94-c8b311ca52b0 req-f67b76b2-4520-4d08-940a-91ce82c9f5f5 service nova] Releasing lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.440812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquired lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.440999] env[69994]: DEBUG nova.network.neutron [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.529846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-66cd32c1-ba92-4044-8e03-4bef98c533a2 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "214b3508-6fb9-455e-be6b-bd9f6902b7ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.544s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.580108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ab438c-3e6b-42ea-a365-ebdfe7fa17b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.600549] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b637c877-68d6-490d-afe7-d0a1441fdc70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.620143] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance '45a8dced-6c49-441c-92e2-ee323ed8753c' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 898.744793] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.817280] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.817540] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.817858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.818084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.818284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.820702] env[69994]: INFO nova.compute.manager [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Terminating instance [ 898.841084] env[69994]: DEBUG nova.network.neutron [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Updated VIF entry in instance network info cache for port 596c8b01-0040-4d26-9668-1847a813bc88. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.841084] env[69994]: DEBUG nova.network.neutron [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Updating instance_info_cache with network_info: [{"id": "596c8b01-0040-4d26-9668-1847a813bc88", "address": "fa:16:3e:32:8f:29", "network": {"id": "d5683c09-8a9c-4640-906c-f1f48a90bcae", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-377918742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a8bdfb28b6d40708b65bc12a4bcbb7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap596c8b01-00", "ovs_interfaceid": "596c8b01-0040-4d26-9668-1847a813bc88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.868646] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c04572-b040-f2d7-828d-f4798028a768, 'name': SearchDatastore_Task, 'duration_secs': 0.009666} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.868646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.872036] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 898.872036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.872036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.872036] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.872249] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1088e8f1-7d7e-4cb9-9d4b-8965092c2de2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.884984] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.885202] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 898.886030] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62461b2-dcd3-4e80-87bf-f00b1c3a56a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.893145] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 898.893145] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522ff8b2-5e3d-a93e-15ea-c83c19478746" [ 898.893145] env[69994]: _type = "Task" [ 898.893145] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.903890] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522ff8b2-5e3d-a93e-15ea-c83c19478746, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.922288] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242015, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.138409] env[69994]: DEBUG nova.network.neutron [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updating instance_info_cache with network_info: [{"id": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "address": "fa:16:3e:b9:e0:62", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772e74a8-02", "ovs_interfaceid": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.169814] env[69994]: DEBUG nova.network.neutron [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Port 003af7d4-a8a5-43d4-b032-96df0b4ae173 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 
899.324584] env[69994]: DEBUG nova.compute.manager [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 899.324756] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.328257] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73ea1af-be85-4804-a78a-350f3f822403 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.338311] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.338633] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8176072-1764-4bac-a26f-93d3428f0177 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.344439] env[69994]: DEBUG oslo_concurrency.lockutils [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] Releasing lock "refresh_cache-c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.344685] env[69994]: DEBUG nova.compute.manager [req-40ea49e8-8855-48e8-b49d-3a6216cb890a req-908444e5-15dd-408e-b853-b58f484a8ca0 service nova] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Received event network-vif-deleted-1a8d6b1d-c58d-4d93-9e56-8fb1dffc025c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.345110] env[69994]: DEBUG oslo_vmware.api [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 899.345110] env[69994]: value = "task-3242016" [ 899.345110] env[69994]: _type = "Task" [ 899.345110] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.352857] env[69994]: DEBUG oslo_vmware.api [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3242016, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.403501] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522ff8b2-5e3d-a93e-15ea-c83c19478746, 'name': SearchDatastore_Task, 'duration_secs': 0.013347} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.407229] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a38f3ba-be27-4969-9d50-38836d1d27d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.413294] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 899.413294] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527e870f-53f5-0f03-bbf5-cfc0f762a5df" [ 899.413294] env[69994]: _type = "Task" [ 899.413294] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.430505] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527e870f-53f5-0f03-bbf5-cfc0f762a5df, 'name': SearchDatastore_Task, 'duration_secs': 0.00935} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.433699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.434036] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 899.434242] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242015, 'name': CreateVM_Task, 'duration_secs': 1.373802} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.434447] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0688f226-5b5f-4d92-a07b-9c140a1ff10d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.436489] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.438212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.438439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.438795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.439648] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c58aa1fc-2672-4546-a070-88af283346ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.442981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0690e371-bfd2-4abf-9566-71d8109fbfd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.448516] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 899.448516] env[69994]: value = "task-3242017" [ 899.448516] env[69994]: _type = "Task" [ 899.448516] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.457034] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 899.457034] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52091f58-ee0d-e721-2790-779b06873e4e" [ 899.457034] env[69994]: _type = "Task" [ 899.457034] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.457993] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd151e0c-ccf5-4af1-810b-3e9006775883 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.469069] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242017, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.476064] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52091f58-ee0d-e721-2790-779b06873e4e, 'name': SearchDatastore_Task, 'duration_secs': 0.018528} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.502807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.503129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.503395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.503547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.503769] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.504293] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-756dc799-3b43-4530-aaa7-0a7af79d716b {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.506956] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e787d4-c7e0-47dd-9070-8bd6381486c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.517187] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ac2737-0fae-4c5a-a84f-f82931d230ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.521608] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.521810] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.522610] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d5289ae-d7e7-46d7-bf02-33ffcb2049bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.536412] env[69994]: DEBUG nova.compute.provider_tree [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.539228] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 899.539228] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52074b7f-f8fa-5506-2fa3-4f694ed85b66" [ 899.539228] env[69994]: _type = "Task" [ 899.539228] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.547642] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52074b7f-f8fa-5506-2fa3-4f694ed85b66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.641364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Releasing lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.855509] env[69994]: DEBUG oslo_vmware.api [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3242016, 'name': PowerOffVM_Task, 'duration_secs': 0.202093} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.855810] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.855979] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.856257] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9fb898d-0095-4095-b358-5cd4924c5171 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.939019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.939019] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.939019] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleting the datastore file [datastore1] 4dbf53e0-caa1-41f4-8376-dfba8d8567cd {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.939019] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba917e66-e77e-4bd7-9ccf-d675d6b68822 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.946058] env[69994]: DEBUG oslo_vmware.api [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for the task: (returnval){ [ 899.946058] env[69994]: value = "task-3242019" [ 899.946058] 
env[69994]: _type = "Task" [ 899.946058] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.955636] env[69994]: DEBUG oslo_vmware.api [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3242019, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.960325] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242017, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501309} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.960564] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.960751] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.960988] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2778443-5c31-4cc5-bb04-8052cde7a32b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.966855] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 899.966855] env[69994]: value = "task-3242020" [ 899.966855] env[69994]: _type = "Task" [ 899.966855] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.974788] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242020, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.044354] env[69994]: DEBUG nova.scheduler.client.report [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.053605] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52074b7f-f8fa-5506-2fa3-4f694ed85b66, 'name': SearchDatastore_Task, 'duration_secs': 0.018939} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.054388] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffcc580f-fd65-4300-8c3d-5e3e3ecf25c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.059311] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 900.059311] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527878de-fdfe-59e3-a740-e303b06ad7e0" [ 900.059311] env[69994]: _type = "Task" [ 900.059311] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.067979] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527878de-fdfe-59e3-a740-e303b06ad7e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.146137] env[69994]: DEBUG nova.compute.manager [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.146604] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4365d6-6c8b-46eb-9439-426be73eaf92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.195169] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.195460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.195695] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.456692] env[69994]: DEBUG oslo_vmware.api [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Task: {'id': task-3242019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160104} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.457746] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.458739] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.458739] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.458739] env[69994]: INFO nova.compute.manager [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Took 1.13 seconds to destroy the instance on the hypervisor. [ 900.458917] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 900.459472] env[69994]: DEBUG nova.compute.manager [-] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 900.459472] env[69994]: DEBUG nova.network.neutron [-] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.477496] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076868} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.477749] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.478532] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83cd8a2-7e4c-4683-9b64-0ed91dc628cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.501190] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.501484] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cf11254-3258-49c9-83a0-58dad2db6b15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.522761] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 900.522761] env[69994]: value = "task-3242021" [ 900.522761] env[69994]: _type = "Task" [ 900.522761] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.530453] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242021, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.551492] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.552186] env[69994]: DEBUG nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 900.554792] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 37.698s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.572205] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527878de-fdfe-59e3-a740-e303b06ad7e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009596} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.572532] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.572852] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc/c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.573162] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8689ede-ffbd-4761-b4e0-3ea34d72cfd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.580043] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 900.580043] env[69994]: value = "task-3242022" [ 900.580043] env[69994]: _type = "Task" [ 900.580043] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.588518] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242022, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.588891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.589172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.589402] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.589619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.589788] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.592378] env[69994]: INFO nova.compute.manager [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Terminating instance [ 901.034064] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242021, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.057694] env[69994]: DEBUG nova.compute.utils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.059288] env[69994]: DEBUG nova.objects.instance [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lazy-loading 'migration_context' on Instance uuid f07750f5-3f1d-4d97-98dc-285ed357cc7e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.062882] env[69994]: DEBUG nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.063086] env[69994]: DEBUG nova.network.neutron [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 901.089693] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242022, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.100192] env[69994]: DEBUG nova.compute.manager [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 901.101278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.103443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3d0ce2-1092-4e4a-a6ba-aa6232d340a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.118745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.119385] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-538ed021-fb2e-4761-8b2c-cf04dc5f5aca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.127639] env[69994]: DEBUG oslo_vmware.api [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 901.127639] env[69994]: value = "task-3242023" [ 901.127639] env[69994]: _type = "Task" [ 901.127639] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.140337] env[69994]: DEBUG oslo_vmware.api [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3242023, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.156080] env[69994]: DEBUG nova.policy [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffda4115deeb413da8f52e964b3cc24e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d7c614ed22d4fa1af10f9374dcc90b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 901.168314] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2ef766-bf0d-4307-9d7a-e1185b79cb5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.178498] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Doing hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 901.179017] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b6ed3323-48d1-43bc-8162-5953f6277335 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.187277] env[69994]: DEBUG oslo_vmware.api [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 901.187277] env[69994]: value = "task-3242024" [ 901.187277] env[69994]: _type = "Task" [ 901.187277] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.199909] env[69994]: DEBUG oslo_vmware.api [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242024, 'name': ResetVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.275152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.275579] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.275931] env[69994]: DEBUG nova.network.neutron [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.486850] env[69994]: DEBUG nova.compute.manager [req-fccfc04d-ba6f-499f-bf23-dbadf9b3ff3b req-49f42c1c-7509-4dae-8b5b-0fd94df86c90 service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Received event network-vif-deleted-064c1f51-da25-4b26-a357-69f406a06504 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 901.486983] env[69994]: INFO nova.compute.manager [req-fccfc04d-ba6f-499f-bf23-dbadf9b3ff3b req-49f42c1c-7509-4dae-8b5b-0fd94df86c90 service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Neutron deleted interface 064c1f51-da25-4b26-a357-69f406a06504; detaching it from the instance and deleting it from the info cache [ 901.487188] env[69994]: DEBUG nova.network.neutron [req-fccfc04d-ba6f-499f-bf23-dbadf9b3ff3b req-49f42c1c-7509-4dae-8b5b-0fd94df86c90 service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.534306] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242021, 'name': ReconfigVM_Task, 'duration_secs': 0.866494} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.534680] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 901.535240] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4b7d648-958c-4434-a7f5-5e95e65e846d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.541463] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 901.541463] env[69994]: value = "task-3242025" [ 901.541463] env[69994]: _type = "Task" [ 901.541463] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.549832] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242025, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.565185] env[69994]: DEBUG nova.network.neutron [-] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.569325] env[69994]: DEBUG nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 901.590366] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242022, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.845379} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.593084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc/c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.593311] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.593739] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c3e8258-a788-4e0c-99d6-f62dbfd35cb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.602017] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 901.602017] env[69994]: value = "task-3242026" [ 901.602017] env[69994]: _type = "Task" [ 901.602017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.608931] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.636499] env[69994]: DEBUG oslo_vmware.api [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3242023, 'name': PowerOffVM_Task, 'duration_secs': 0.396002} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.636765] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.636936] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 901.637214] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66ad7071-de39-4ab9-b883-f682b00882ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.697340] env[69994]: DEBUG oslo_vmware.api [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242024, 'name': ResetVM_Task, 'duration_secs': 0.099444} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.697673] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Did hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 901.697874] env[69994]: DEBUG nova.compute.manager [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 901.698666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8feaaf5-9864-4f01-a942-eb0b34c8708a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.718237] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.718237] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.718237] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Deleting the datastore file [datastore2] b003b7c2-e754-440e-8a65-13c5e9c68cd5 {{(pid=69994) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.719514] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9da647c-94a9-4844-ab10-d94a8fe124b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.726014] env[69994]: DEBUG oslo_vmware.api [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for the task: (returnval){ [ 901.726014] env[69994]: value = "task-3242028" [ 901.726014] env[69994]: _type = "Task" [ 901.726014] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.731382] env[69994]: DEBUG nova.network.neutron [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Successfully created port: 958fcea0-f2d3-40f0-9433-fefae2a2245d {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.740770] env[69994]: DEBUG oslo_vmware.api [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3242028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.990798] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91dccc3e-d19c-4d9f-9ff3-ec846cbaee55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.007018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a850c63-5f6b-43d7-9c6e-50ee1f9560bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.018475] env[69994]: DEBUG nova.network.neutron [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.050375] env[69994]: DEBUG nova.compute.manager [req-fccfc04d-ba6f-499f-bf23-dbadf9b3ff3b req-49f42c1c-7509-4dae-8b5b-0fd94df86c90 service nova] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Detach interface failed, port_id=064c1f51-da25-4b26-a357-69f406a06504, reason: Instance 4dbf53e0-caa1-41f4-8376-dfba8d8567cd could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 902.061574] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242025, 'name': Rename_Task, 'duration_secs': 0.369248} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.061971] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.062239] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-191df74a-7deb-42a1-9665-f145499b6a99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.067885] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 902.067885] env[69994]: value = "task-3242029" [ 902.067885] env[69994]: _type = "Task" [ 902.067885] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.073937] env[69994]: INFO nova.compute.manager [-] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Took 1.61 seconds to deallocate network for instance. [ 902.086721] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242029, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.101600] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f95f768-6905-40d2-8e36-750da5c9eecf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.114693] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51462d6b-4b7f-42e5-bf6b-85b524980779 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.119561] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104409} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.119561] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.120661] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab59be42-8aa4-4a76-9e32-65ad3eb6b845 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.150982] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c711a17-78b4-4606-9aa5-c3cb1e2e6691 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.171573] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc/c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.172605] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46aaf7c2-4a66-4a3d-a46f-bec505b52567 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.190958] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91373dd9-4cf8-4f08-a47b-b6d626378c73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.196213] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 902.196213] env[69994]: value = "task-3242030" [ 902.196213] env[69994]: _type = "Task" [ 902.196213] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.207656] env[69994]: DEBUG nova.compute.provider_tree [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.216807] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242030, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.217845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f5299198-1524-4d64-b5ee-d20bc512ceb5 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.414s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.238151] env[69994]: DEBUG oslo_vmware.api [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Task: {'id': task-3242028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152022} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.238507] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.238777] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.239052] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.239323] env[69994]: INFO nova.compute.manager [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 902.239672] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 902.240249] env[69994]: DEBUG nova.compute.manager [-] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 902.240410] env[69994]: DEBUG nova.network.neutron [-] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 902.526480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.582627] env[69994]: DEBUG nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 902.585203] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242029, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.588697] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.616140] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 902.616140] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.616140] env[69994]: DEBUG nova.virt.hardware [None 
req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.616140] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.616140] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.616140] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 902.616541] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 902.616858] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 902.617207] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 902.617532] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 902.617887] env[69994]: DEBUG nova.virt.hardware [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 902.619193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670fbe31-d872-4da7-a64d-cf9d75e9ce10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.627964] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8845f9f-4ffa-44dd-bcaa-d7b939d27a92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.716332] env[69994]: DEBUG 
nova.scheduler.client.report [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 902.718476] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242030, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.028827] env[69994]: DEBUG nova.compute.manager [req-9d12edc8-bf1c-4a00-bbfd-c54cce75331d req-162755e9-ecae-4323-a5fa-9743d5c1a4ea service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Received event network-vif-deleted-a16b9fc2-06de-47cb-b39f-b77130ed0eec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 903.028827] env[69994]: INFO nova.compute.manager [req-9d12edc8-bf1c-4a00-bbfd-c54cce75331d req-162755e9-ecae-4323-a5fa-9743d5c1a4ea service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Neutron deleted interface a16b9fc2-06de-47cb-b39f-b77130ed0eec; detaching it from the instance and deleting it from the info cache [ 903.028827] env[69994]: DEBUG nova.network.neutron [req-9d12edc8-bf1c-4a00-bbfd-c54cce75331d req-162755e9-ecae-4323-a5fa-9743d5c1a4ea service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.061265] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d179cee-3b45-46a1-831d-028ae1f2c0e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.090381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c85388a-ba66-48a3-ad12-b072d0817f64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.099101] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance '45a8dced-6c49-441c-92e2-ee323ed8753c' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 903.115468] env[69994]: DEBUG oslo_vmware.api [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242029, 'name': PowerOnVM_Task, 'duration_secs': 0.764295} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.116057] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.116260] env[69994]: DEBUG nova.compute.manager [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.117107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda20641-9cb0-41d0-a95d-fd96f54705e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.207463] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242030, 'name': ReconfigVM_Task, 'duration_secs': 0.554172} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.207791] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Reconfigured VM instance instance-00000043 to attach disk [datastore1] c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc/c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.208379] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-094c8ce7-22b1-44c0-8445-6398474ccc9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.214911] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 903.214911] env[69994]: value = "task-3242031" [ 903.214911] env[69994]: _type = "Task" [ 903.214911] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.226171] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242031, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.369820] env[69994]: DEBUG nova.network.neutron [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Successfully updated port: 958fcea0-f2d3-40f0-9433-fefae2a2245d {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.416131] env[69994]: DEBUG nova.network.neutron [-] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.443629] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "e3697388-4598-4dde-8c20-43fc7665083b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.443936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.444219] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "e3697388-4598-4dde-8c20-43fc7665083b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.444527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.445220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.447773] env[69994]: INFO nova.compute.manager [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Terminating instance [ 903.529360] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70becd42-f006-4bbe-ae2f-9ed58637933e {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.540970] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2dc46b-6838-41c1-b23a-f32475ca5b9a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.579297] env[69994]: DEBUG nova.compute.manager [req-9d12edc8-bf1c-4a00-bbfd-c54cce75331d req-162755e9-ecae-4323-a5fa-9743d5c1a4ea service nova] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Detach interface failed, port_id=a16b9fc2-06de-47cb-b39f-b77130ed0eec, reason: Instance b003b7c2-e754-440e-8a65-13c5e9c68cd5 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 903.610973] env[69994]: DEBUG nova.compute.manager [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Received event network-changed-772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 903.611198] env[69994]: DEBUG nova.compute.manager [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Refreshing instance network info cache due to event network-changed-772e74a8-0286-4533-b4ad-63afc1c9e6e4. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 903.611416] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Acquiring lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.611582] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Acquired lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.611749] env[69994]: DEBUG nova.network.neutron [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Refreshing network info cache for port 772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 903.618353] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd0efc-7c82-42a5-a1f9-3b2c99b38ed8 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance '45a8dced-6c49-441c-92e2-ee323ed8753c' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 903.637258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.725778] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 
tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242031, 'name': Rename_Task, 'duration_secs': 0.193799} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.726116] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.726378] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-487a21cf-4c31-4e11-9036-a5db2aa4ae9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.729647] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.175s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.735441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 40.290s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.744660] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 903.744660] env[69994]: value = "task-3242032" [ 903.744660] env[69994]: _type = "Task" [ 903.744660] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.754200] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242032, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.872435] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "refresh_cache-4ca53416-caed-418c-bb40-cabb8e311803" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.872708] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquired lock "refresh_cache-4ca53416-caed-418c-bb40-cabb8e311803" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.873549] env[69994]: DEBUG nova.network.neutron [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 903.919188] env[69994]: INFO nova.compute.manager [-] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Took 1.68 seconds to deallocate network for instance. [ 903.952033] env[69994]: DEBUG nova.compute.manager [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 903.952200] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 903.953112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcf386f-3db5-4007-ba9b-1043153bc164 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.960662] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.960908] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-071c4e83-e6dd-4260-bda5-79b6f0c753de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.966736] env[69994]: DEBUG oslo_vmware.api [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 903.966736] env[69994]: value = "task-3242033" [ 903.966736] env[69994]: _type = "Task" [ 903.966736] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.975747] env[69994]: DEBUG oslo_vmware.api [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242033, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.273289] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242032, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.276082] env[69994]: INFO nova.compute.manager [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Rebuilding instance [ 904.323821] env[69994]: DEBUG nova.compute.manager [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.324834] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93783131-a593-4b9a-a0b7-45705c26fc8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.371636] env[69994]: DEBUG nova.network.neutron [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updated VIF entry in instance network info cache for port 772e74a8-0286-4533-b4ad-63afc1c9e6e4. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 904.371636] env[69994]: DEBUG nova.network.neutron [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updating instance_info_cache with network_info: [{"id": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "address": "fa:16:3e:b9:e0:62", "network": {"id": "2e51d0d8-050c-40b5-b20d-b754df62b09e", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-955665594-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95dff361679f4d3eb08daf6701c7ab82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d31a554-a94c-4471-892f-f65aa87b8279", "external-id": "nsx-vlan-transportzone-241", "segmentation_id": 241, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772e74a8-02", "ovs_interfaceid": "772e74a8-0286-4533-b4ad-63afc1c9e6e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.404691] env[69994]: DEBUG nova.network.neutron [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.425923] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.477268] env[69994]: DEBUG oslo_vmware.api [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242033, 'name': PowerOffVM_Task, 'duration_secs': 0.185421} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.477623] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 904.477812] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 904.478113] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39c2d2ab-d694-4fe7-a307-23459e764014 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.538740] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 904.539086] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 904.539359] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Deleting the datastore file [datastore2] e3697388-4598-4dde-8c20-43fc7665083b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 904.539640] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97a3cfcc-fb6d-4aee-836f-6acf8df64f88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.544884] env[69994]: DEBUG nova.network.neutron [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Updating instance_info_cache with network_info: [{"id": "958fcea0-f2d3-40f0-9433-fefae2a2245d", "address": "fa:16:3e:aa:ec:38", "network": {"id": "49505810-30cb-4b94-b6ac-8b546a13a835", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-623797230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d7c614ed22d4fa1af10f9374dcc90b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958fcea0-f2", "ovs_interfaceid": "958fcea0-f2d3-40f0-9433-fefae2a2245d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.548350] env[69994]: DEBUG oslo_vmware.api [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 904.548350] env[69994]: value = "task-3242035" [ 904.548350] env[69994]: _type = "Task" [ 904.548350] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.559945] env[69994]: DEBUG oslo_vmware.api [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242035, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.761501] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Applying migration context for instance 45a8dced-6c49-441c-92e2-ee323ed8753c as it has an incoming, in-progress migration a470e5c8-07de-40fb-a4de-5addff23af5a. Migration status is finished {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 904.763987] env[69994]: INFO nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating resource usage from migration a470e5c8-07de-40fb-a4de-5addff23af5a [ 904.779621] env[69994]: DEBUG oslo_vmware.api [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242032, 'name': PowerOnVM_Task, 'duration_secs': 0.856868} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.781058] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.781285] env[69994]: INFO nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Took 8.99 seconds to spawn the instance on the hypervisor. 
[ 904.781480] env[69994]: DEBUG nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.784240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30788db5-0407-4b7d-9386-c0c87631ddd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.788118] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance b003b7c2-e754-440e-8a65-13c5e9c68cd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.788275] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance dbad6bed-64ba-4dfd-abad-c0b2c775ba2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.788417] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.788518] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 87473dd1-458d-4ef4-a1bd-7e653e509ea4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.788658] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e8caf244-413b-49bb-bdff-79aca0ccbc2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.788857] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 4dbf53e0-caa1-41f4-8376-dfba8d8567cd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 904.788987] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ed662f67-be0e-4f19-bb8a-6af39b4d348c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.789116] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance b99b73e6-3348-4d5d-aa57-f01ace0bfc42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.789258] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eff21ec5-a51d-4004-9edf-1891f706fe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.789379] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.789493] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 566522b0-7aa7-4552-9be7-035d742ba394 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.789633] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance a4544bc9-6935-4825-9b45-2054d2ced330 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.789777] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.789917] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance cd5a47f2-147b-4e50-980d-8e1c40bc7594 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 904.790055] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 14b28a21-1b71-4d7e-bd6c-269f5d588300 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 904.790241] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance f3268fe1-768c-4d27-828a-5885ce166f90 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 904.790319] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 25a64898-568e-4095-aace-f8a564cdf916 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 904.790429] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 0b975ce0-40a4-48a9-a046-66227636d496 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 904.875833] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Releasing lock "refresh_cache-e3697388-4598-4dde-8c20-43fc7665083b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.875833] env[69994]: DEBUG nova.compute.manager [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Received event network-vif-plugged-958fcea0-f2d3-40f0-9433-fefae2a2245d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.875833] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Acquiring lock "4ca53416-caed-418c-bb40-cabb8e311803-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.875833] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Lock "4ca53416-caed-418c-bb40-cabb8e311803-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.875833] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Lock "4ca53416-caed-418c-bb40-cabb8e311803-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.875833] env[69994]: DEBUG nova.compute.manager [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] No waiting events found dispatching network-vif-plugged-958fcea0-f2d3-40f0-9433-fefae2a2245d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 904.875833] env[69994]: WARNING nova.compute.manager [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a 
req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Received unexpected event network-vif-plugged-958fcea0-f2d3-40f0-9433-fefae2a2245d for instance with vm_state building and task_state spawning. [ 904.875833] env[69994]: DEBUG nova.compute.manager [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Received event network-changed-958fcea0-f2d3-40f0-9433-fefae2a2245d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.875833] env[69994]: DEBUG nova.compute.manager [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Refreshing instance network info cache due to event network-changed-958fcea0-f2d3-40f0-9433-fefae2a2245d. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 904.875833] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Acquiring lock "refresh_cache-4ca53416-caed-418c-bb40-cabb8e311803" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.053021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Releasing lock "refresh_cache-4ca53416-caed-418c-bb40-cabb8e311803" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.053021] env[69994]: DEBUG nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Instance network_info: |[{"id": "958fcea0-f2d3-40f0-9433-fefae2a2245d", "address": "fa:16:3e:aa:ec:38", "network": {"id": "49505810-30cb-4b94-b6ac-8b546a13a835", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-623797230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d7c614ed22d4fa1af10f9374dcc90b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958fcea0-f2", "ovs_interfaceid": "958fcea0-f2d3-40f0-9433-fefae2a2245d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 905.053021] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Acquired lock "refresh_cache-4ca53416-caed-418c-bb40-cabb8e311803" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.053021] env[69994]: DEBUG nova.network.neutron [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Refreshing network info cache for port 958fcea0-f2d3-40f0-9433-fefae2a2245d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.053021] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:ec:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '958fcea0-f2d3-40f0-9433-fefae2a2245d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.060702] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Creating folder: Project (8d7c614ed22d4fa1af10f9374dcc90b9). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.065836] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-640907fa-620a-4695-9679-463a0374574a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.074605] env[69994]: DEBUG oslo_vmware.api [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174971} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.074906] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 905.075463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 905.075463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 905.075736] env[69994]: INFO nova.compute.manager [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Took 1.12 seconds to destroy the instance on the hypervisor. 
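Note on the teardown of instance e3697388-4598-4dde-8c20-43fc7665083b above: the vmops/ds_util entries show a fixed ordering, namely power off, unregister, delete the datastore contents, then deallocate networking. The outline below only restates that ordering; every helper in it is a hypothetical placeholder, not the real nova.virt.vmwareapi driver code.

```python
# Illustrative outline of the teardown order seen in the log above.
# Each helper is a hypothetical stand-in for the corresponding driver step.
def power_off_vm(instance_uuid: str) -> None:
    print(f"[instance: {instance_uuid}] Powered off the VM")


def unregister_vm(instance_uuid: str) -> None:
    print(f"[instance: {instance_uuid}] Unregistered the VM")


def delete_datastore_contents(instance_uuid: str, datastore: str) -> None:
    print(f"[instance: {instance_uuid}] Deleted contents of the VM "
          f"from datastore {datastore}")


def deallocate_network(instance_uuid: str) -> None:
    print(f"[instance: {instance_uuid}] Deallocating network for instance")


def destroy_instance(instance_uuid: str, datastore: str) -> None:
    """Tear an instance down in the same order the log messages show."""
    power_off_vm(instance_uuid)
    unregister_vm(instance_uuid)
    delete_datastore_contents(instance_uuid, datastore)
    print(f"[instance: {instance_uuid}] Instance destroyed")
    deallocate_network(instance_uuid)


destroy_instance("e3697388-4598-4dde-8c20-43fc7665083b", "datastore2")
```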
[ 905.076128] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.077778] env[69994]: DEBUG nova.compute.manager [-] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 905.077944] env[69994]: DEBUG nova.network.neutron [-] [instance: e3697388-4598-4dde-8c20-43fc7665083b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 905.079839] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Created folder: Project (8d7c614ed22d4fa1af10f9374dcc90b9) in parent group-v647729. [ 905.079929] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Creating folder: Instances. Parent ref: group-v647926. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.080221] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a338c0ee-a5f6-4483-9be9-202be07f178d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.090175] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Created folder: Instances in parent group-v647926. [ 905.090175] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.090324] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.090561] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed6db9e7-2566-4e5c-bf95-14ffd9de7774 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.109570] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.109570] env[69994]: value = "task-3242038" [ 905.109570] env[69994]: _type = "Task" [ 905.109570] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.117584] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242038, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.296317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.296317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.296317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.296317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.296317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.296317] env[69994]: INFO nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 95a615f0-9414-47d5-965f-8bcf9c644849 has allocations against this compute host but is not found in the database. [ 905.296317] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance f07750f5-3f1d-4d97-98dc-285ed357cc7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.296317] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 7ef329a2-4d61-428a-8a43-f309a1e953d6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
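Note on the paired "Acquiring lock … acquired … released" entries around do_terminate_instance and _clear_events above: they come from oslo.concurrency's lockutils, which serializes work on one instance behind a per-instance lock and uses a nested "<uuid>-events" lock for event bookkeeping. A hedged sketch of the same serialization pattern using lockutils.lock follows; the lock names and the empty bodies are chosen purely for illustration and do not reproduce ComputeManager's actual logic.

```python
# Sketch of the per-instance locking pattern visible in the log above,
# assuming oslo.concurrency is installed. Lock names are illustrative.
from oslo_concurrency import lockutils

INSTANCE_UUID = "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24"  # UUID taken from the log


def clear_events_for_instance() -> None:
    # Short-lived nested lock for the instance's pending external events,
    # mirroring the "<uuid>-events" lock entries in the log.
    with lockutils.lock(f"{INSTANCE_UUID}-events"):
        pass  # pop/clear pending events here


def do_terminate_instance() -> None:
    # Serialize all operations on one instance behind a per-instance lock.
    with lockutils.lock(INSTANCE_UUID):
        clear_events_for_instance()
        # ... power off, destroy and network deallocation would follow ...


do_terminate_instance()
```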
[ 905.296317] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef410b09-8686-409e-8391-d50cd0e0df04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.296317] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ff645ae7-940e-4842-8915-a96d36d08067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.296317] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e3697388-4598-4dde-8c20-43fc7665083b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.296317] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Migration a470e5c8-07de-40fb-a4de-5addff23af5a is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 905.297230] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 45a8dced-6c49-441c-92e2-ee323ed8753c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.297520] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.297747] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 4ca53416-caed-418c-bb40-cabb8e311803 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.299258] env[69994]: INFO nova.compute.manager [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Terminating instance [ 905.301479] env[69994]: INFO nova.compute.manager [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Swapping old allocation on dict_keys(['92ce3c95-4efe-4d04-802b-6b187afc5aa7']) held by migration 95a615f0-9414-47d5-965f-8bcf9c644849 for instance [ 905.312556] env[69994]: INFO nova.compute.manager [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Took 49.17 seconds to build instance. [ 905.338806] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.340419] env[69994]: DEBUG nova.scheduler.client.report [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Overwriting current allocation {'allocations': {'92ce3c95-4efe-4d04-802b-6b187afc5aa7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 90}}, 'project_id': '1aa7929b2e0d467c99c25acd8b7e92d6', 'user_id': '29861e0318bb4e5fa5d92379b063367c', 'consumer_generation': 1} on consumer f07750f5-3f1d-4d97-98dc-285ed357cc7e {{(pid=69994) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 905.343039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87c1be1f-bf00-440e-94a9-d97d8d9d40f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.351741] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 905.351741] env[69994]: value = "task-3242039" [ 905.351741] env[69994]: _type = "Task" [ 905.351741] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.361847] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242039, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.444465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.444655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquired lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.444845] env[69994]: DEBUG nova.network.neutron [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.619584] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242038, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.651397] env[69994]: DEBUG nova.compute.manager [req-ae025307-0d7d-49f5-8f83-d9f0e40d7661 req-a3a40d07-5d9d-4085-9462-bdca7a9c37f4 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Received event network-vif-deleted-772e74a8-0286-4533-b4ad-63afc1c9e6e4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.651633] env[69994]: INFO nova.compute.manager [req-ae025307-0d7d-49f5-8f83-d9f0e40d7661 req-a3a40d07-5d9d-4085-9462-bdca7a9c37f4 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Neutron deleted interface 772e74a8-0286-4533-b4ad-63afc1c9e6e4; detaching it from the instance and deleting it from the info cache [ 905.651817] env[69994]: DEBUG nova.network.neutron [req-ae025307-0d7d-49f5-8f83-d9f0e40d7661 req-a3a40d07-5d9d-4085-9462-bdca7a9c37f4 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.793974] env[69994]: DEBUG nova.network.neutron [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Updated VIF entry in instance network info cache for port 958fcea0-f2d3-40f0-9433-fefae2a2245d. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 905.794377] env[69994]: DEBUG nova.network.neutron [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Updating instance_info_cache with network_info: [{"id": "958fcea0-f2d3-40f0-9433-fefae2a2245d", "address": "fa:16:3e:aa:ec:38", "network": {"id": "49505810-30cb-4b94-b6ac-8b546a13a835", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-623797230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d7c614ed22d4fa1af10f9374dcc90b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958fcea0-f2", "ovs_interfaceid": "958fcea0-f2d3-40f0-9433-fefae2a2245d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.812267] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance c14851d2-66c5-4865-ae66-abbe303f0c31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 905.815627] env[69994]: DEBUG nova.compute.manager [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 905.815775] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.816697] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc86bf0-a379-4959-9278-ff51dd0a87b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.819636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b233702f-e503-4bed-a45f-9e34fdc0762e tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.690s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.824793] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.825519] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-009e2c62-61d2-4f88-ac13-dba9d524947e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.834858] env[69994]: DEBUG oslo_vmware.api [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 905.834858] env[69994]: value = "task-3242040" [ 905.834858] env[69994]: _type = "Task" [ 905.834858] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.843166] env[69994]: DEBUG oslo_vmware.api [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242040, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.860810] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242039, 'name': PowerOffVM_Task, 'duration_secs': 0.201224} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.861076] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.861350] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.862303] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b16c18-f210-4d2d-9ed3-e604a4c8219d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.868919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "45a8dced-6c49-441c-92e2-ee323ed8753c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.869208] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.869417] env[69994]: DEBUG nova.compute.manager [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Going to confirm migration 3 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 905.870822] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.870925] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22e5d0c9-f2ea-480f-9cb3-02e844fea19a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.977348] env[69994]: DEBUG nova.network.neutron [-] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.121985] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242038, 'name': CreateVM_Task, 'duration_secs': 0.554199} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.121985] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.122472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.122629] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.122944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 906.123221] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc20dd2c-539b-4c5c-bd49-05892f6a901a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.127670] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 906.127670] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529330d8-adc7-e3fa-ee3d-d2ae8ec97171" [ 906.127670] env[69994]: _type = "Task" [ 906.127670] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.136030] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529330d8-adc7-e3fa-ee3d-d2ae8ec97171, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.154668] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-006c737b-0823-4c7e-b835-4886452a9ef3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.165921] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3e95aa-b047-4755-b8ca-f7deeedc2ac4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.208320] env[69994]: DEBUG nova.compute.manager [req-ae025307-0d7d-49f5-8f83-d9f0e40d7661 req-a3a40d07-5d9d-4085-9462-bdca7a9c37f4 service nova] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Detach interface failed, port_id=772e74a8-0286-4533-b4ad-63afc1c9e6e4, reason: Instance e3697388-4598-4dde-8c20-43fc7665083b could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 906.275620] env[69994]: DEBUG nova.network.neutron [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance_info_cache with network_info: [{"id": "03a2cce0-4737-45b4-8482-4eabd0e63386", "address": "fa:16:3e:fe:86:b3", "network": {"id": "f3e19305-d7dd-4b7e-9067-72c836ae3ff7", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "1110add503f24d308ace30fa5efaa3e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03a2cce0-47", "ovs_interfaceid": "03a2cce0-4737-45b4-8482-4eabd0e63386", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.297351] env[69994]: DEBUG oslo_concurrency.lockutils [req-c95f8cec-5d41-45c3-8d6b-cd545aed4f8a req-1d21dc70-b69f-470d-b114-81f8e52ad9fe service nova] Releasing lock "refresh_cache-4ca53416-caed-418c-bb40-cabb8e311803" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.314967] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 906.345684] env[69994]: DEBUG oslo_vmware.api [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242040, 'name': PowerOffVM_Task, 'duration_secs': 0.312962} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.345684] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.345851] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 906.345998] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a23b6026-2ebf-4623-8227-9ebd47220cbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.412338] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.412548] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.412743] env[69994]: DEBUG nova.network.neutron [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.412947] env[69994]: DEBUG nova.objects.instance [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'info_cache' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.479994] env[69994]: INFO nova.compute.manager [-] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Took 1.40 seconds to deallocate network for instance. 
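Note on the repeated "Waiting for the task: (returnval){ value = task-… }" entries and the progress lines for CreateVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task throughout this excerpt: they reflect a poll-until-done loop around vCenter tasks. The sketch below is a generic, self-contained illustration of that pattern only; the simulated get_task_progress helper is hypothetical, and the real polling lives in oslo_vmware/api.py as the log paths indicate.

```python
# Generic poll-until-done loop illustrating the task-wait pattern in the log.
# get_task_progress() is a hypothetical stand-in that simulates a task
# reaching success on the third poll (progress 0% -> 99% -> completed).
import itertools
import time

_fake_progress = itertools.chain(
    [{"state": "running", "progress": 0},
     {"state": "running", "progress": 99}],
    itertools.repeat({"state": "success", "progress": 100,
                      "duration_secs": 0.554}),
)


def get_task_progress(task_id: str) -> dict:
    return next(_fake_progress)


def wait_for_task(task_id: str, poll_interval: float = 0.1,
                  timeout: float = 30.0) -> dict:
    """Poll a task until it reports success; raise on error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_progress(task_id)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")


print(wait_for_task("task-3242038"))
```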
[ 906.532560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.532838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.533053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.533245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.533416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.535493] env[69994]: INFO nova.compute.manager [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Terminating instance [ 906.540479] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.540675] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.540857] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 
tempest-ServersAdminTestJSON-1865879220-project-member] Deleting the datastore file [datastore1] 87473dd1-458d-4ef4-a1bd-7e653e509ea4 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.541124] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdcaca6e-d5ff-4d9b-9b38-2ac1ea2ca2de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.549016] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 906.549016] env[69994]: value = "task-3242043" [ 906.549016] env[69994]: _type = "Task" [ 906.549016] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.554562] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.554865] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.555135] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Deleting the datastore file [datastore2] 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.558413] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f26180ee-66a8-4074-960b-d35b754854da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.560536] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242043, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.564208] env[69994]: DEBUG oslo_vmware.api [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 906.564208] env[69994]: value = "task-3242044" [ 906.564208] env[69994]: _type = "Task" [ 906.564208] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.572727] env[69994]: DEBUG oslo_vmware.api [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242044, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.637546] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529330d8-adc7-e3fa-ee3d-d2ae8ec97171, 'name': SearchDatastore_Task, 'duration_secs': 0.011021} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.637858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.638142] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.638413] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.638626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.638773] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.639040] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b5ac89e-27df-43de-9329-05059206abfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.655837] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.656057] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.656801] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffc7ec61-699a-452e-9d27-25ca317c5bcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.662021] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 906.662021] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528337d0-c556-9abb-8975-65a354247e27" [ 906.662021] env[69994]: _type = "Task" [ 906.662021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.669551] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528337d0-c556-9abb-8975-65a354247e27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.779883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Releasing lock "refresh_cache-f07750f5-3f1d-4d97-98dc-285ed357cc7e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.780395] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.780678] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59055b29-1474-4181-8a9e-cf2ee5b171b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.788039] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 906.788039] env[69994]: value = "task-3242045" [ 906.788039] env[69994]: _type = "Task" [ 906.788039] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.798501] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.818097] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 80705dfe-4768-4f35-8acf-316b15814f78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 906.987176] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.039759] env[69994]: DEBUG nova.compute.manager [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.039995] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.040924] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023efd1a-69fa-4480-a275-53f794e1012d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.048440] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.048686] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8afbedd-c069-4165-a281-5084f2c8cebc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.059319] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153434} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.061260] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.061260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.061260] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.064035] env[69994]: DEBUG oslo_vmware.api [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 907.064035] env[69994]: value = "task-3242046" [ 907.064035] env[69994]: _type = "Task" [ 907.064035] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.075025] env[69994]: DEBUG oslo_vmware.api [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242046, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.079020] env[69994]: DEBUG oslo_vmware.api [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159998} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.079292] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.079484] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.079664] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.079837] env[69994]: INFO nova.compute.manager [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Took 1.26 seconds to destroy the instance on the hypervisor. [ 907.080081] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.080621] env[69994]: DEBUG nova.compute.manager [-] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 907.080717] env[69994]: DEBUG nova.network.neutron [-] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 907.174361] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528337d0-c556-9abb-8975-65a354247e27, 'name': SearchDatastore_Task, 'duration_secs': 0.009897} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.175289] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-221b4680-1b68-4769-a214-8a03bd507311 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.183892] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 907.183892] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fa6102-4469-04b3-0598-640a04188f10" [ 907.183892] env[69994]: _type = "Task" [ 907.183892] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.191443] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fa6102-4469-04b3-0598-640a04188f10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.298182] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242045, 'name': PowerOffVM_Task, 'duration_secs': 0.224232} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.298182] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.298992] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:40:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='60367b47-c076-4b83-be63-6ff8f43248be',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-590586289',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 907.299388] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.299646] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.299917] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.300181] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.300588] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 
tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 907.300903] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 907.301188] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 907.301483] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 907.301756] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 907.302040] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 907.307451] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-358bfac8-b0d7-432f-a5dc-432f08f8ceca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.320613] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 17389887-5463-44e1-b1c0-f123d8dedec7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 907.321047] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 907.321313] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4736MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 907.327440] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 907.327440] env[69994]: value = "task-3242047" [ 907.327440] env[69994]: _type = "Task" [ 907.327440] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.334375] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242047, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.589827] env[69994]: DEBUG oslo_vmware.api [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242046, 'name': PowerOffVM_Task, 'duration_secs': 0.168756} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.593449] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.593536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.593941] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed58357e-baac-44ce-83b9-045c5172e5eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.655907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.656146] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.656538] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Deleting the datastore file [datastore1] c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.656626] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c64cd59-a43e-4c96-9dc4-900bf4001e76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.667949] env[69994]: DEBUG oslo_vmware.api [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for the task: (returnval){ [ 907.667949] env[69994]: value = "task-3242049" [ 907.667949] env[69994]: _type = "Task" [ 907.667949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.677635] env[69994]: DEBUG oslo_vmware.api [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242049, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.678792] env[69994]: DEBUG nova.network.neutron [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.693730] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fa6102-4469-04b3-0598-640a04188f10, 'name': SearchDatastore_Task, 'duration_secs': 0.009773} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.696933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.697243] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4ca53416-caed-418c-bb40-cabb8e311803/4ca53416-caed-418c-bb40-cabb8e311803.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.697678] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea0da545-365e-40af-8a12-8d29e17158c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.708826] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 907.708826] env[69994]: value = "task-3242050" [ 907.708826] env[69994]: _type = "Task" [ 907.708826] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.720940] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.826186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527169b6-a72e-4c2b-a1b0-5921bae4f2f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.839904] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71de70d-cae5-4d0e-9871-378bd4d605c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.843845] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242047, 'name': ReconfigVM_Task, 'duration_secs': 0.139835} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.844843] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7737c7f7-510e-4148-a29b-1bd0c12cb1b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.875178] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420626f8-bf73-4bb9-b17c-5c1744bfd7df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.897771] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:40:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='60367b47-c076-4b83-be63-6ff8f43248be',id=35,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-590586289',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 907.898033] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.898214] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 907.898374] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.898645] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 907.898828] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 907.899051] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 907.899261] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 907.899444] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 907.899610] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 907.899782] env[69994]: DEBUG nova.virt.hardware [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 907.904384] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-831e7c2c-9eb0-4534-8b4a-120e137b8b86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.907894] env[69994]: DEBUG nova.compute.manager [req-fcb6ce52-2e6c-4d7e-9320-ca08f195942f req-a81a21e0-4a37-4f03-9bdf-0e7abce01666 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Received event network-vif-deleted-f2652bdf-bba7-4a73-9045-397e55945ed1 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 907.908069] env[69994]: INFO nova.compute.manager [req-fcb6ce52-2e6c-4d7e-9320-ca08f195942f req-a81a21e0-4a37-4f03-9bdf-0e7abce01666 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Neutron deleted interface f2652bdf-bba7-4a73-9045-397e55945ed1; detaching it from the instance and deleting it from the info cache [ 907.908272] env[69994]: DEBUG nova.network.neutron [req-fcb6ce52-2e6c-4d7e-9320-ca08f195942f req-a81a21e0-4a37-4f03-9bdf-0e7abce01666 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.911041] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfea7595-b2f8-47fa-b0ba-732b5b86186d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.919752] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 907.919752] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5215b108-9755-a366-59d7-11d3ad897cf8" [ 907.919752] env[69994]: _type = "Task" [ 907.919752] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.932461] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.945201] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5215b108-9755-a366-59d7-11d3ad897cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.008258} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.950877] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 907.951565] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61089a1b-dc45-43c2-b778-d6cb5976859f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.974771] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 907.974771] env[69994]: value = "task-3242051" [ 907.974771] env[69994]: _type = "Task" [ 907.974771] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.984826] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242051, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.110731] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 908.111062] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.111377] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 908.111695] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.111948] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 908.112212] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 908.112561] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 908.112830] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 908.113133] env[69994]: DEBUG nova.virt.hardware [None 
req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 908.113408] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 908.113696] env[69994]: DEBUG nova.virt.hardware [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 908.114691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27088551-a0cf-49fa-a1eb-2cc4233a30aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.125058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1b43e1-272d-4282-bcb0-58a6e65e3c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.144917] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:5e:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c66a277b-e3bf-43b8-a632-04fdd0720b91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7e8be98-685a-4d07-9440-e07af619b026', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 908.153675] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.154344] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 908.154643] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d6f49e2-6743-4b88-b362-c12d89343ab0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.183509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.183838] env[69994]: DEBUG nova.objects.instance [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'migration_context' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.185291] env[69994]: DEBUG oslo_vmware.api [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Task: {'id': task-3242049, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135772} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.186976] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.187278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.187513] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.187807] env[69994]: INFO nova.compute.manager [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Took 1.15 seconds to destroy the instance on the hypervisor. [ 908.188127] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.188443] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 908.188443] env[69994]: value = "task-3242052" [ 908.188443] env[69994]: _type = "Task" [ 908.188443] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.188629] env[69994]: DEBUG nova.compute.manager [-] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.188730] env[69994]: DEBUG nova.network.neutron [-] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.202826] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242052, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.220586] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242050, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.380759] env[69994]: DEBUG nova.network.neutron [-] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.419030] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea06185e-ec1d-41e5-ab5f-7a42ef6fce45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.429332] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d3a26a-4cd8-425d-bc5d-b0a53995502a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.442710] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.475354] env[69994]: DEBUG nova.compute.manager [req-fcb6ce52-2e6c-4d7e-9320-ca08f195942f req-a81a21e0-4a37-4f03-9bdf-0e7abce01666 service nova] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Detach interface failed, port_id=f2652bdf-bba7-4a73-9045-397e55945ed1, reason: Instance 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 908.485047] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242051, 'name': ReconfigVM_Task, 'duration_secs': 0.23244} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.485700] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 908.486645] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a21936-e43d-4e08-a1c7-dce46bbb79a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.510496] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.510972] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbc8cf4f-1a5a-4acb-a647-8dbdd0228fb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.534020] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 908.534020] env[69994]: value = "task-3242053" [ 908.534020] env[69994]: _type = "Task" [ 908.534020] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.542037] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242053, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.686670] env[69994]: DEBUG nova.objects.base [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Object Instance<45a8dced-6c49-441c-92e2-ee323ed8753c> lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 908.689024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91008b69-754f-473c-aca5-a157a79a598c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.720689] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65238caf-5e0d-4e37-bd84-e683fba03741 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.723502] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242052, 'name': CreateVM_Task, 'duration_secs': 0.374426} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.724022] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 908.726086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.726086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.726237] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 908.727027] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c53a24b2-39d3-4c79-9f0b-7a60c78c5ce7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.733524] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540112} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.734039] env[69994]: DEBUG oslo_vmware.api [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 908.734039] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52480ee3-7aca-a3e3-c71b-11d62c4926d9" [ 908.734039] env[69994]: _type = "Task" [ 908.734039] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.734847] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4ca53416-caed-418c-bb40-cabb8e311803/4ca53416-caed-418c-bb40-cabb8e311803.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.735196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.735491] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a394769-b60b-46f4-bea1-c1437f151ca6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.742782] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 908.742782] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ef6ff1-2fcf-e6dc-03c1-f3ca5d2de104" [ 908.742782] env[69994]: _type = "Task" [ 908.742782] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.753334] env[69994]: DEBUG oslo_vmware.api [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52480ee3-7aca-a3e3-c71b-11d62c4926d9, 'name': SearchDatastore_Task, 'duration_secs': 0.007461} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.755441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.755569] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 908.755569] env[69994]: value = "task-3242054" [ 908.755569] env[69994]: _type = "Task" [ 908.755569] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.762268] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ef6ff1-2fcf-e6dc-03c1-f3ca5d2de104, 'name': SearchDatastore_Task, 'duration_secs': 0.009018} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.762934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.763207] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 908.763553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.763681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.763871] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 908.764292] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-55fdc0f0-2c7d-4838-bb73-20f9ac79851d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.769964] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.779389] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 908.779389] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 908.779389] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99496ab7-5792-48f0-af62-7dded182929f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.782956] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 908.782956] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52115e80-1cc7-6daa-8208-b88d2779e174" [ 908.782956] env[69994]: _type = "Task" [ 908.782956] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.791410] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52115e80-1cc7-6daa-8208-b88d2779e174, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.887671] env[69994]: INFO nova.compute.manager [-] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Took 1.81 seconds to deallocate network for instance. 
[ 908.949422] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 908.949679] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.214s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.949944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.935s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.951604] env[69994]: INFO nova.compute.claims [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.954323] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.954477] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Cleaning up deleted instances {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 909.041812] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242053, 'name': ReconfigVM_Task, 'duration_secs': 0.311965} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.042093] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Reconfigured VM instance instance-0000002f to attach disk [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e/f07750f5-3f1d-4d97-98dc-285ed357cc7e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.042897] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9305b3-4bf3-4919-9390-707b53eecc17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.060666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa222747-6f6d-43c9-b2f0-7269ad355c69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.078667] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39feca74-8a27-41a6-bb43-15817405ad3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.096235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe329de-3aa6-4212-b022-4c1d0716f158 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.102406] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.102652] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14887c8b-28f3-4e40-b0db-07825cfe34b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.108220] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 909.108220] env[69994]: value = "task-3242055" [ 909.108220] env[69994]: _type = "Task" [ 909.108220] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.117497] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242055, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.265330] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070801} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.265615] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.266428] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225d7886-b6a5-4ab5-b70f-fd680818458f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.288018] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 4ca53416-caed-418c-bb40-cabb8e311803/4ca53416-caed-418c-bb40-cabb8e311803.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.288416] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1009c84-0050-43b3-a089-100b15e6226d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.311996] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52115e80-1cc7-6daa-8208-b88d2779e174, 'name': SearchDatastore_Task, 'duration_secs': 0.00995} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.313841] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 909.313841] env[69994]: value = "task-3242056" [ 909.313841] env[69994]: _type = "Task" [ 909.313841] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.314052] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39a933a5-fe0d-4a1b-af46-3528e6f3b80c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.316216] env[69994]: DEBUG nova.network.neutron [-] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.323278] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 909.323278] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520877c3-c57a-b70e-8e73-18e1ac4779fc" [ 909.323278] env[69994]: _type = "Task" [ 909.323278] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.326609] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242056, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.335049] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520877c3-c57a-b70e-8e73-18e1ac4779fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.394941] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.470774] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] There are 42 instances to clean {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 909.471086] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ffe5f2c6-69e7-4bdb-80d1-b421b695e790] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.618318] env[69994]: DEBUG oslo_vmware.api [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242055, 'name': PowerOnVM_Task, 'duration_secs': 0.397188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.618575] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.822551] env[69994]: INFO nova.compute.manager [-] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Took 1.63 seconds to deallocate network for instance. [ 909.835894] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242056, 'name': ReconfigVM_Task, 'duration_secs': 0.3323} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.836607] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 4ca53416-caed-418c-bb40-cabb8e311803/4ca53416-caed-418c-bb40-cabb8e311803.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.838178] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a3b27ca-93e1-48ae-9d51-9821e1c084ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.845922] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520877c3-c57a-b70e-8e73-18e1ac4779fc, 'name': SearchDatastore_Task, 'duration_secs': 0.015407} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.846738] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.847156] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 909.847524] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab061317-0a12-484e-9ada-8ed7291b8be3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.851690] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 909.851690] env[69994]: value = "task-3242057" [ 909.851690] env[69994]: _type = "Task" [ 909.851690] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.856177] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 909.856177] env[69994]: value = "task-3242058" [ 909.856177] env[69994]: _type = "Task" [ 909.856177] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.866621] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242057, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.873607] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.967135] env[69994]: DEBUG nova.compute.manager [req-4e44afb0-1b1d-433d-a644-9734d3546023 req-55919bcf-ec1d-48ba-be26-835aca97ca96 service nova] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Received event network-vif-deleted-596c8b01-0040-4d26-9668-1847a813bc88 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 909.977386] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 6d99c52e-8893-4ad7-8d8e-56bd8c9379b8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.338082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.367711] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242057, 'name': Rename_Task, 'duration_secs': 0.219169} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.368015] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.368292] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62408a9a-b641-4df9-9050-09bbc5bc0269 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.373138] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242058, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512416} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.373787] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.373981] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 910.376589] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08b19682-0ad5-480f-8ecf-b6a7fa30492e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.379690] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 910.379690] env[69994]: value = "task-3242059" [ 910.379690] env[69994]: _type = "Task" [ 910.379690] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.384246] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 910.384246] env[69994]: value = "task-3242060" [ 910.384246] env[69994]: _type = "Task" [ 910.384246] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.390719] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242059, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.398600] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242060, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.423662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef07088-a109-4e92-ac8f-51e462ad0803 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.432072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d54d9e-8a02-4726-9465-059820195f08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.464244] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99cf3eb-87d7-411e-912e-3408b21a8365 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.472391] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2344ba70-593f-4a1b-ad12-f649fe20fa48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.487182] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 2d812174-d2ad-4fac-8ae5-ffa51d691374] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.490062] env[69994]: DEBUG nova.compute.provider_tree [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.630964] env[69994]: INFO nova.compute.manager [None req-e61778d5-9144-47d2-a961-da7fe1392dc4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance to original state: 'active' [ 910.892800] env[69994]: DEBUG oslo_vmware.api [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242059, 'name': PowerOnVM_Task, 'duration_secs': 0.470735} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.895682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.895953] env[69994]: INFO nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Took 8.31 seconds to spawn the instance on the hypervisor. 
[ 910.896171] env[69994]: DEBUG nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.896497] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242060, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073322} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.897184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c417e1fd-8e42-4e0b-a569-e8b1b0948b37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.899457] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.900233] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3942957c-b7d4-4d06-91f5-1f4d7bf889f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.926320] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.926803] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d618e2b4-acff-48fd-b27b-5e679a7f330b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.944991] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 910.944991] env[69994]: value = "task-3242061" [ 910.944991] env[69994]: _type = "Task" [ 910.944991] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.953728] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242061, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.992829] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: dca638aa-c491-431f-a0e5-d02bd76705ad] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.995630] env[69994]: DEBUG nova.scheduler.client.report [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 911.435055] env[69994]: INFO nova.compute.manager [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Took 48.78 seconds to build instance. [ 911.455426] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242061, 'name': ReconfigVM_Task, 'duration_secs': 0.285515} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.456362] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4/87473dd1-458d-4ef4-a1bd-7e653e509ea4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.456992] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccc0a591-aa4c-4352-8a3b-aff67139fc68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.463845] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 911.463845] env[69994]: value = "task-3242062" [ 911.463845] env[69994]: _type = "Task" [ 911.463845] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.472073] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242062, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.499678] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: df907dda-c1a0-4aaa-8ab2-5bb8e2ac38df] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.506027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.506027] env[69994]: DEBUG nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.506027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.985s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.506027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.508317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.849s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.508452] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.510569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.497s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.512191] env[69994]: INFO nova.compute.claims [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 
ef37ce64-2c26-4080-899a-6d9dbb5850c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.566790] env[69994]: INFO nova.scheduler.client.report [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted allocations for instance cd5a47f2-147b-4e50-980d-8e1c40bc7594 [ 911.570479] env[69994]: INFO nova.scheduler.client.report [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Deleted allocations for instance 14b28a21-1b71-4d7e-bd6c-269f5d588300 [ 911.937635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00d27b3d-1856-4f47-88c6-8703ecf9c0a7 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "4ca53416-caed-418c-bb40-cabb8e311803" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.764s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.973997] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242062, 'name': Rename_Task, 'duration_secs': 0.137313} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.974297] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.974539] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2255726d-93df-4326-830c-0defa4e26fde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.980842] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 911.980842] env[69994]: value = "task-3242063" [ 911.980842] env[69994]: _type = "Task" [ 911.980842] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.988758] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242063, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.006923] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 86e514bb-8b47-4605-bd85-55c6c9874320] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.018845] env[69994]: DEBUG nova.compute.utils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 912.023277] env[69994]: DEBUG nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 912.023404] env[69994]: DEBUG nova.network.neutron [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.081588] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c7d2cbdf-29ca-4200-86e0-82493e8efbec tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "14b28a21-1b71-4d7e-bd6c-269f5d588300" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.061s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.083091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d25ce6cd-9539-49dc-809e-0c3904d883f2 tempest-MultipleCreateTestJSON-688441779 tempest-MultipleCreateTestJSON-688441779-project-member] Lock "cd5a47f2-147b-4e50-980d-8e1c40bc7594" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.184s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.087286] env[69994]: DEBUG nova.policy [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3530c79ac01543348cda39eeee2dab47', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aacff9947d8149e084d2b17f07f3d24c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 912.352602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.353598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.356017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.356017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.356017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.356670] env[69994]: INFO nova.compute.manager [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Terminating instance [ 912.497766] env[69994]: DEBUG oslo_vmware.api [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242063, 'name': PowerOnVM_Task, 'duration_secs': 0.438886} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.497766] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.497766] env[69994]: DEBUG nova.compute.manager [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.498934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3696429f-38c5-4812-8eef-059852f17a26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.511454] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: d4f87534-813e-4ff6-8b1f-ee23cb0b8e80] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.522072] env[69994]: DEBUG nova.network.neutron [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Successfully created port: 58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.527224] env[69994]: DEBUG nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.862763] env[69994]: DEBUG nova.compute.manager [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 912.863017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.869583] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a59366-800e-4e8e-99aa-3687ea0ae720 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.878190] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.878190] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fcc532b-df9e-4632-9f71-81b81e40f8dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.884127] env[69994]: DEBUG oslo_vmware.api [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 912.884127] env[69994]: value = "task-3242064" [ 912.884127] env[69994]: _type = "Task" [ 912.884127] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.898826] env[69994]: DEBUG oslo_vmware.api [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242064, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.971327] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078e211b-66c6-4321-b5fe-b9282c8752d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.979533] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ae79e2-a74e-4baa-8197-66aa3b247a41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.015590] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fac2b8-302a-4e26-b10c-4bab44db200d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.025491] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 0b284e71-7af2-4782-b950-4f7eac5221a4] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.032269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.037993] env[69994]: INFO nova.virt.block_device [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Booting with volume f439bb0f-f9f2-4fca-9d5c-9ad196d08d41 at /dev/sda [ 913.041276] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c377694c-0c69-4271-8895-426b7dc0e9de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.059128] env[69994]: DEBUG nova.compute.provider_tree [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.095806] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4a1776f-856c-48d8-8460-19613eae6b88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.105353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80c80d1-462a-493a-87f6-c2b4b578bfe3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.143632] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e58b377-e8f6-44de-87b4-a409784b3eec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.152456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590d3877-6719-4c58-9608-283e9d54cf95 
{{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.188514] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56499bb-fca6-4aaf-8755-24c9acabd598 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.195607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6b037d-80d5-4ac8-b6be-ceac83ea55ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.209113] env[69994]: DEBUG nova.virt.block_device [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updating existing volume attachment record: 49381524-1920-47a6-8c11-b368c4441d0c {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 913.270327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "4ca53416-caed-418c-bb40-cabb8e311803" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.270663] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "4ca53416-caed-418c-bb40-cabb8e311803" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.270880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "4ca53416-caed-418c-bb40-cabb8e311803-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.271076] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "4ca53416-caed-418c-bb40-cabb8e311803-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.271251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "4ca53416-caed-418c-bb40-cabb8e311803-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.275382] env[69994]: INFO nova.compute.manager [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 
tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Terminating instance [ 913.395531] env[69994]: DEBUG oslo_vmware.api [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242064, 'name': PowerOffVM_Task, 'duration_secs': 0.189511} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.395814] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.397166] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.399598] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d0d86fd-48b0-43ed-91ef-74b72b599703 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.534251] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 214b3508-6fb9-455e-be6b-bd9f6902b7ae] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.567217] env[69994]: DEBUG nova.scheduler.client.report [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.779369] env[69994]: DEBUG nova.compute.manager [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 913.779611] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.781045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe3b3c3-e8c7-44e2-8ed5-a1ede11799a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.791452] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.791452] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6cbfb38-9f79-408f-927b-e6b1fa6fb80a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.798398] env[69994]: DEBUG oslo_vmware.api [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 913.798398] env[69994]: value = "task-3242066" [ 913.798398] env[69994]: _type = "Task" [ 913.798398] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.809442] env[69994]: DEBUG oslo_vmware.api [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242066, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.038285] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 558ee84a-731b-4cb1-967d-cf84c8d39718] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.070259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.070795] env[69994]: DEBUG nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 914.074078] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.252s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.075523] env[69994]: INFO nova.compute.claims [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 914.178093] env[69994]: DEBUG nova.compute.manager [req-4fbb7014-d698-4f21-9b66-c36f848964c7 req-e328d9cb-c245-40b3-8941-48083f7102ca service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Received event network-vif-plugged-58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.178343] env[69994]: DEBUG oslo_concurrency.lockutils [req-4fbb7014-d698-4f21-9b66-c36f848964c7 req-e328d9cb-c245-40b3-8941-48083f7102ca service nova] Acquiring lock "c14851d2-66c5-4865-ae66-abbe303f0c31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.178551] env[69994]: DEBUG oslo_concurrency.lockutils [req-4fbb7014-d698-4f21-9b66-c36f848964c7 req-e328d9cb-c245-40b3-8941-48083f7102ca service nova] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.178806] env[69994]: DEBUG oslo_concurrency.lockutils [req-4fbb7014-d698-4f21-9b66-c36f848964c7 req-e328d9cb-c245-40b3-8941-48083f7102ca service nova] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.178910] env[69994]: DEBUG nova.compute.manager [req-4fbb7014-d698-4f21-9b66-c36f848964c7 req-e328d9cb-c245-40b3-8941-48083f7102ca service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] No waiting events found dispatching network-vif-plugged-58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.179690] env[69994]: WARNING nova.compute.manager [req-4fbb7014-d698-4f21-9b66-c36f848964c7 req-e328d9cb-c245-40b3-8941-48083f7102ca service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Received unexpected event network-vif-plugged-58a0ef78-0177-4996-ba8f-adbf83a9c0e6 for instance with vm_state building and task_state block_device_mapping. [ 914.310078] env[69994]: DEBUG oslo_vmware.api [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242066, 'name': PowerOffVM_Task, 'duration_secs': 0.187053} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.310443] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.310525] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 914.311389] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e725f848-0291-417a-9aea-879518c3b476 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.324748] env[69994]: DEBUG nova.network.neutron [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Successfully updated port: 58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.341374] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "ff645ae7-940e-4842-8915-a96d36d08067" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.341672] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "ff645ae7-940e-4842-8915-a96d36d08067" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.341918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "ff645ae7-940e-4842-8915-a96d36d08067-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.342168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "ff645ae7-940e-4842-8915-a96d36d08067-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.342356] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "ff645ae7-940e-4842-8915-a96d36d08067-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.344976] env[69994]: INFO nova.compute.manager [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Terminating instance [ 914.372912] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 914.373163] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 914.373383] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Deleting the datastore file [datastore2] 4ca53416-caed-418c-bb40-cabb8e311803 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 914.373728] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0afbb92-e004-4109-8186-cbafc05c6008 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.380618] env[69994]: DEBUG oslo_vmware.api [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for the task: (returnval){ [ 914.380618] env[69994]: value = "task-3242068" [ 914.380618] env[69994]: _type = "Task" [ 914.380618] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.388670] env[69994]: DEBUG oslo_vmware.api [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242068, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.542254] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 5f672fd4-b96f-4506-aa1e-96692a00cb43] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.580530] env[69994]: DEBUG nova.compute.utils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 914.584221] env[69994]: DEBUG nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 914.584410] env[69994]: DEBUG nova.network.neutron [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 914.644795] env[69994]: DEBUG nova.policy [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64b979ffffc94e09bf911bdb89f4796a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccb64f97e46a4e499df974959db53dcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 914.832256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquiring lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.832256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquired lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.832256] env[69994]: DEBUG nova.network.neutron [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 914.849376] env[69994]: DEBUG nova.compute.manager [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] 
Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.849670] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.851176] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82768f43-8ccf-445a-b026-1ac3ac02ae46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.862081] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.862367] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e5f3967-3827-42a2-baeb-b4d7f88f5908 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.867733] env[69994]: DEBUG oslo_vmware.api [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 914.867733] env[69994]: value = "task-3242069" [ 914.867733] env[69994]: _type = "Task" [ 914.867733] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.876845] env[69994]: DEBUG oslo_vmware.api [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.891674] env[69994]: DEBUG oslo_vmware.api [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Task: {'id': task-3242068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35872} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.892522] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.892522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.892522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.892522] env[69994]: INFO nova.compute.manager [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Took 1.11 seconds to destroy the instance on the hypervisor. [ 914.892869] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 914.896336] env[69994]: DEBUG nova.compute.manager [-] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 914.896336] env[69994]: DEBUG nova.network.neutron [-] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 915.045786] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 3c2c8a40-919d-4280-b9be-f8d95b1a263e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 915.085177] env[69994]: DEBUG nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 915.091019] env[69994]: DEBUG nova.network.neutron [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Successfully created port: 75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 915.301508] env[69994]: DEBUG nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 915.302154] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 915.302313] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.302533] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 915.302720] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.302791] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 915.303016] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 915.303146] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 
tempest-ServerActionsV293TestJSON-1892589652-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 915.303298] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 915.303466] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 915.303629] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 915.303802] env[69994]: DEBUG nova.virt.hardware [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 915.306975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28d42ff-7ffa-4fa9-b83d-d0303983524a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.318443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e1e478-7cb2-4965-aa06-76ca4539910c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.366120] env[69994]: DEBUG nova.network.neutron [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.375362] env[69994]: DEBUG nova.compute.manager [req-4d9b7d38-f010-4a9c-ad23-3cad0f660e78 req-ba700ff0-8788-4b60-9ed6-5c96219cbba7 service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Received event network-vif-deleted-958fcea0-f2d3-40f0-9433-fefae2a2245d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 915.375362] env[69994]: INFO nova.compute.manager [req-4d9b7d38-f010-4a9c-ad23-3cad0f660e78 req-ba700ff0-8788-4b60-9ed6-5c96219cbba7 service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Neutron deleted interface 958fcea0-f2d3-40f0-9433-fefae2a2245d; detaching it from the instance and deleting it from the info cache [ 915.375570] env[69994]: DEBUG nova.network.neutron [req-4d9b7d38-f010-4a9c-ad23-3cad0f660e78 req-ba700ff0-8788-4b60-9ed6-5c96219cbba7 service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.380353] env[69994]: DEBUG oslo_vmware.api [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242069, 'name': PowerOffVM_Task, 'duration_secs': 0.20379} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.380742] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.381029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.381172] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f2ad04c-6398-4f35-a48b-544d35048bc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.445051] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.445344] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.445550] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleting the datastore file [datastore2] ff645ae7-940e-4842-8915-a96d36d08067 {{(pid=69994) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.446230] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb7e1263-bf1e-4ba4-b24d-8c86afdac5d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.455216] env[69994]: DEBUG oslo_vmware.api [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 915.455216] env[69994]: value = "task-3242071" [ 915.455216] env[69994]: _type = "Task" [ 915.455216] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.466130] env[69994]: DEBUG oslo_vmware.api [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242071, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.523790] env[69994]: DEBUG nova.network.neutron [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updating instance_info_cache with network_info: [{"id": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "address": "fa:16:3e:db:90:66", "network": {"id": "61112f44-6155-452d-9ba0-0800f3ec9a9e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2056121607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aacff9947d8149e084d2b17f07f3d24c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a0ef78-01", "ovs_interfaceid": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.551437] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 289cbcc2-cd8f-4c4f-9169-a897f5527de1] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 915.556252] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a6ef84-fa1f-4310-8d7f-fd802e0a3bc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.565393] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a418d3-7d4c-4d1b-8f87-96a176ab5f49 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.603165] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63bd3611-aa5f-425e-8361-109688ef76c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.612738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848caac0-0de3-487d-8f3c-e93458920475 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.626787] env[69994]: DEBUG nova.compute.provider_tree [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.644331] env[69994]: DEBUG nova.network.neutron [-] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.681072] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.681072] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.681161] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Deleting the datastore file [datastore1] f07750f5-3f1d-4d97-98dc-285ed357cc7e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.685016] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74e8f0ea-635a-48be-9a9d-109cacb7112f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.688647] env[69994]: DEBUG oslo_vmware.api [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 915.688647] env[69994]: value = "task-3242072" [ 915.688647] env[69994]: _type = "Task" [ 915.688647] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.697034] env[69994]: DEBUG oslo_vmware.api [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.882216] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b878a68-7936-47df-9437-9f5a84c7bdf2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.891964] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489c1b3e-2b1b-4853-ad91-271d6b73cb2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.925286] env[69994]: DEBUG nova.compute.manager [req-4d9b7d38-f010-4a9c-ad23-3cad0f660e78 req-ba700ff0-8788-4b60-9ed6-5c96219cbba7 service nova] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Detach interface failed, port_id=958fcea0-f2d3-40f0-9433-fefae2a2245d, reason: Instance 4ca53416-caed-418c-bb40-cabb8e311803 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 915.964180] env[69994]: DEBUG oslo_vmware.api [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340284} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.964447] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.964621] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 915.964801] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 915.965040] env[69994]: INFO nova.compute.manager [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Took 1.12 seconds to destroy the instance on the hypervisor. [ 915.965288] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.965473] env[69994]: DEBUG nova.compute.manager [-] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 915.966049] env[69994]: DEBUG nova.network.neutron [-] [instance: ff645ae7-940e-4842-8915-a96d36d08067] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.026973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Releasing lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.027368] env[69994]: DEBUG nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance network_info: |[{"id": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "address": "fa:16:3e:db:90:66", "network": {"id": "61112f44-6155-452d-9ba0-0800f3ec9a9e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2056121607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aacff9947d8149e084d2b17f07f3d24c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a0ef78-01", "ovs_interfaceid": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.027753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:90:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '572b7281-aad3-45fa-9cb2-fc1c70569948', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58a0ef78-0177-4996-ba8f-adbf83a9c0e6', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.036387] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Creating folder: Project (aacff9947d8149e084d2b17f07f3d24c). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.036666] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-807ad7f1-9311-4d56-a6ee-4dee29b2f933 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.050187] env[69994]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 916.050367] env[69994]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69994) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 916.050679] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Folder already exists: Project (aacff9947d8149e084d2b17f07f3d24c). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 916.050867] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Creating folder: Instances. Parent ref: group-v647902. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.051116] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2881134-e7b4-4fcc-8fc9-8eb034fd6b05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.056305] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 744fe018-d12c-44c2-98f1-c11fbfffc98e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.060176] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Created folder: Instances in parent group-v647902. [ 916.060399] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.060576] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.060769] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7c04172-09e7-4bcd-9962-a1e572eb8ca4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.080810] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.080810] env[69994]: value = "task-3242075" [ 916.080810] env[69994]: _type = "Task" [ 916.080810] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.088517] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242075, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.104201] env[69994]: DEBUG nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 916.131160] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 916.131454] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 916.131641] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 916.132513] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 916.132513] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 916.132513] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 916.132513] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 
tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 916.132513] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 916.132696] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 916.132880] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 916.133070] env[69994]: DEBUG nova.virt.hardware [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 916.134093] env[69994]: DEBUG nova.scheduler.client.report [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 916.138632] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccf1260-3665-49bb-b157-85955f66218f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.146367] env[69994]: INFO nova.compute.manager [-] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Took 1.25 seconds to deallocate network for instance. [ 916.149363] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7b71e9-2042-4cd8-83cf-3e1f213bfa5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.200169] env[69994]: DEBUG oslo_vmware.api [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.462888} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.201656] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.201849] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.202187] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.202268] env[69994]: INFO nova.compute.manager [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Took 3.34 seconds to destroy the instance on the hypervisor. [ 916.202450] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.203664] env[69994]: DEBUG nova.compute.manager [-] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.203770] env[69994]: DEBUG nova.network.neutron [-] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.206822] env[69994]: DEBUG nova.compute.manager [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Received event network-changed-58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.206822] env[69994]: DEBUG nova.compute.manager [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Refreshing instance network info cache due to event network-changed-58a0ef78-0177-4996-ba8f-adbf83a9c0e6. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 916.206822] env[69994]: DEBUG oslo_concurrency.lockutils [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] Acquiring lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.206822] env[69994]: DEBUG oslo_concurrency.lockutils [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] Acquired lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.207022] env[69994]: DEBUG nova.network.neutron [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Refreshing network info cache for port 58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.559696] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ce6f9a88-faa8-442e-8b48-64979dd2d03e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.595199] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242075, 'name': CreateVM_Task, 'duration_secs': 0.487535} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.595385] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.596116] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '49381524-1920-47a6-8c11-b368c4441d0c', 'disk_bus': None, 'guest_format': None, 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647906', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'name': 'volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c14851d2-66c5-4865-ae66-abbe303f0c31', 'attached_at': '', 'detached_at': '', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'serial': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41'}, 'mount_device': '/dev/sda', 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 916.596327] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Root volume attach. 
Driver type: vmdk {{(pid=69994) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 916.597218] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e5acda-7a07-4864-be6a-e715367ff6e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.609403] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69603be0-0b54-4eb5-8dbe-09357f817437 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.616591] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68199faa-1c8d-4433-9380-2b5a53b13a85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.622487] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c3053403-c929-4b34-b0a2-98ac49fb5dfb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.629493] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 916.629493] env[69994]: value = "task-3242076" [ 916.629493] env[69994]: _type = "Task" [ 916.629493] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.637608] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.642441] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.642930] env[69994]: DEBUG nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.645479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.357s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.646917] env[69994]: INFO nova.compute.claims [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.659972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.703916] env[69994]: DEBUG nova.network.neutron [-] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.776090] env[69994]: DEBUG nova.network.neutron [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Successfully updated port: 75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 916.921054] env[69994]: DEBUG nova.network.neutron [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updated VIF entry in instance network info cache for port 58a0ef78-0177-4996-ba8f-adbf83a9c0e6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 916.921530] env[69994]: DEBUG nova.network.neutron [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updating instance_info_cache with network_info: [{"id": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "address": "fa:16:3e:db:90:66", "network": {"id": "61112f44-6155-452d-9ba0-0800f3ec9a9e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2056121607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aacff9947d8149e084d2b17f07f3d24c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a0ef78-01", "ovs_interfaceid": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.941908] env[69994]: DEBUG nova.network.neutron [-] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.065356] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e8b4640f-302d-43cd-a654-c42f9cb34766] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.143825] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 42%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.152673] env[69994]: DEBUG nova.compute.utils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 917.155376] env[69994]: DEBUG nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 917.155720] env[69994]: DEBUG nova.network.neutron [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 917.195293] env[69994]: DEBUG nova.policy [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42976cbf12d645ee8bbedf58c7d07603', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16b66dfea80140689fa05c54842cdf96', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.205919] env[69994]: INFO nova.compute.manager [-] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Took 1.24 seconds to deallocate network for instance. [ 917.281462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.281596] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.281745] env[69994]: DEBUG nova.network.neutron [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.428569] env[69994]: DEBUG oslo_concurrency.lockutils [req-34b33702-81f3-465e-b2f3-a90f68fb89ae req-72001189-4260-4f37-a672-3b6e1138cb50 service nova] Releasing lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.431384] env[69994]: DEBUG nova.compute.manager [req-637173a2-7f11-4f1d-a3bc-c8200ac3eae2 req-5a2953d6-4e17-481f-82a6-c464aab83315 service nova] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Received event network-vif-deleted-d2919329-57fe-4483-b8d9-754310db51d9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.431731] env[69994]: DEBUG nova.compute.manager [req-637173a2-7f11-4f1d-a3bc-c8200ac3eae2 req-5a2953d6-4e17-481f-82a6-c464aab83315 service nova] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Received event network-vif-deleted-03a2cce0-4737-45b4-8482-4eabd0e63386 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.444230] 
env[69994]: INFO nova.compute.manager [-] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Took 1.24 seconds to deallocate network for instance. [ 917.512494] env[69994]: DEBUG nova.network.neutron [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Successfully created port: ea997e7f-79b4-40ab-bd44-2882bc9ec8fa {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.569143] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 493c2d85-eef5-44ae-acfc-2744685135ca] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.645974] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 54%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.655881] env[69994]: DEBUG nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.713410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.824803] env[69994]: DEBUG nova.network.neutron [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.953908] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.037633] env[69994]: DEBUG nova.network.neutron [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updating instance_info_cache with network_info: [{"id": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "address": "fa:16:3e:5c:3b:72", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75af8d87-ec", "ovs_interfaceid": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.076537] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 153f0ead-6e2f-4077-b86a-00d3a1114fed] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.084568] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23d3774-3c9f-4f4b-af0f-486f50128c9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.095984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4578cfd4-52a6-4ffd-b7a0-3f53a2f4239a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.136646] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e89c62-2d7f-4b6e-a4ec-d754646dcff8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.151830] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41910d5a-0822-4205-a625-e157b489eba9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.155317] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 
tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 67%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.172979] env[69994]: DEBUG nova.compute.provider_tree [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.236392] env[69994]: DEBUG nova.compute.manager [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Received event network-vif-plugged-75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.236623] env[69994]: DEBUG oslo_concurrency.lockutils [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] Acquiring lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.236835] env[69994]: DEBUG oslo_concurrency.lockutils [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.237047] env[69994]: DEBUG oslo_concurrency.lockutils [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.237193] env[69994]: DEBUG nova.compute.manager [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] No waiting events found dispatching network-vif-plugged-75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 918.237379] env[69994]: WARNING nova.compute.manager [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Received unexpected event network-vif-plugged-75af8d87-ecba-45ba-867a-8c8e9c0389c9 for instance with vm_state building and task_state spawning. 
[ 918.237506] env[69994]: DEBUG nova.compute.manager [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Received event network-changed-75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.237662] env[69994]: DEBUG nova.compute.manager [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Refreshing instance network info cache due to event network-changed-75af8d87-ecba-45ba-867a-8c8e9c0389c9. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 918.237831] env[69994]: DEBUG oslo_concurrency.lockutils [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] Acquiring lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.540125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.540502] env[69994]: DEBUG nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Instance network_info: |[{"id": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "address": "fa:16:3e:5c:3b:72", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75af8d87-ec", "ovs_interfaceid": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 918.540832] env[69994]: DEBUG oslo_concurrency.lockutils [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] Acquired lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.541030] env[69994]: DEBUG nova.network.neutron [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Refreshing network info cache for 
port 75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.542359] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:3b:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75af8d87-ecba-45ba-867a-8c8e9c0389c9', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.550083] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating folder: Project (ccb64f97e46a4e499df974959db53dcd). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.553221] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52081d7f-a420-4b97-b14d-14318112d1d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.570643] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created folder: Project (ccb64f97e46a4e499df974959db53dcd) in parent group-v647729. [ 918.570973] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating folder: Instances. Parent ref: group-v647932. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.571488] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2ce34bb-b775-435d-adcb-2e5451d49ceb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.580540] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e4013007-fd79-4d70-a9d1-70a4c621c0ea] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.586533] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created folder: Instances in parent group-v647932. [ 918.586792] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.587331] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 918.587935] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91ed2556-96da-4a0b-bb9e-f6f8d8cb0cc8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.608883] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.608883] env[69994]: value = "task-3242079" [ 918.608883] env[69994]: _type = "Task" [ 918.608883] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.623274] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242079, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.647457] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 82%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.676015] env[69994]: DEBUG nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.680054] env[69994]: DEBUG nova.scheduler.client.report [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.709041] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.709397] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.709629] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.709857] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.710044] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.710230] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.710468] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.710850] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 918.710850] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.711085] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.711265] env[69994]: DEBUG nova.virt.hardware [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.712179] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0a51aa-4c17-48a8-bd13-e5617decbf14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.724350] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76749b6-9cb2-43bb-be2d-e7fc782e104e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.884406] env[69994]: DEBUG nova.network.neutron [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updated VIF entry in instance network info cache for port 75af8d87-ecba-45ba-867a-8c8e9c0389c9. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.884834] env[69994]: DEBUG nova.network.neutron [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updating instance_info_cache with network_info: [{"id": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "address": "fa:16:3e:5c:3b:72", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75af8d87-ec", "ovs_interfaceid": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.084059] env[69994]: DEBUG nova.network.neutron [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Successfully updated port: ea997e7f-79b4-40ab-bd44-2882bc9ec8fa {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.088060] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 0bfe4393-5b2a-487f-ba7a-858ed4c861a5] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.120678] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242079, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.142919] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.187595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.188339] env[69994]: DEBUG nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 919.191138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.842s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.191332] env[69994]: DEBUG nova.objects.instance [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 919.387744] env[69994]: DEBUG oslo_concurrency.lockutils [req-245829ca-f7d7-4145-a73d-7c0d4ed4b48a req-ad17c3b6-1147-49d5-a58c-2bbbee418726 service nova] Releasing lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.459648] env[69994]: DEBUG nova.compute.manager [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Received event network-vif-plugged-ea997e7f-79b4-40ab-bd44-2882bc9ec8fa {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 919.459754] env[69994]: DEBUG oslo_concurrency.lockutils [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] Acquiring lock "80705dfe-4768-4f35-8acf-316b15814f78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.459965] env[69994]: DEBUG oslo_concurrency.lockutils [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] Lock "80705dfe-4768-4f35-8acf-316b15814f78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.460149] env[69994]: DEBUG oslo_concurrency.lockutils [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] Lock "80705dfe-4768-4f35-8acf-316b15814f78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.460316] env[69994]: DEBUG nova.compute.manager [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] No waiting events found dispatching network-vif-plugged-ea997e7f-79b4-40ab-bd44-2882bc9ec8fa {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 919.460510] env[69994]: WARNING nova.compute.manager [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Received unexpected event network-vif-plugged-ea997e7f-79b4-40ab-bd44-2882bc9ec8fa for instance with vm_state building and task_state spawning. [ 919.460675] env[69994]: DEBUG nova.compute.manager [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Received event network-changed-ea997e7f-79b4-40ab-bd44-2882bc9ec8fa {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 919.460828] env[69994]: DEBUG nova.compute.manager [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Refreshing instance network info cache due to event network-changed-ea997e7f-79b4-40ab-bd44-2882bc9ec8fa. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 919.461019] env[69994]: DEBUG oslo_concurrency.lockutils [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] Acquiring lock "refresh_cache-80705dfe-4768-4f35-8acf-316b15814f78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.461160] env[69994]: DEBUG oslo_concurrency.lockutils [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] Acquired lock "refresh_cache-80705dfe-4768-4f35-8acf-316b15814f78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.461313] env[69994]: DEBUG nova.network.neutron [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Refreshing network info cache for port ea997e7f-79b4-40ab-bd44-2882bc9ec8fa {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.586327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "refresh_cache-80705dfe-4768-4f35-8acf-316b15814f78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.590981] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 6e8286a3-6fd1-44ee-a5ca-b21f3178334d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.620700] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242079, 'name': CreateVM_Task, 'duration_secs': 0.515169} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.620878] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 919.621573] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.621753] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.622078] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 919.622369] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-920cf5f4-8282-4e37-ac0b-0581da955258 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.628536] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 919.628536] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5209680a-fae7-2558-83cb-9f485a6da292" [ 919.628536] env[69994]: _type = "Task" [ 919.628536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.637548] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5209680a-fae7-2558-83cb-9f485a6da292, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.645864] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.695280] env[69994]: DEBUG nova.compute.utils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 919.696855] env[69994]: DEBUG nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 919.996232] env[69994]: DEBUG nova.network.neutron [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.076964] env[69994]: DEBUG nova.network.neutron [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.094751] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: cef66a67-e3ac-40dc-a8a4-0375bd64c484] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.140199] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5209680a-fae7-2558-83cb-9f485a6da292, 'name': SearchDatastore_Task, 'duration_secs': 0.086281} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.143321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.143565] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 920.143817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.143966] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.144169] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 920.144441] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a977478f-f387-4156-8189-a15c60923a7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.153392] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 97%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.163154] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 920.163352] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 920.164128] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eb60234-f64d-477f-b53c-91f0fc6c9941 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.171018] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 920.171018] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e8e1c1-e239-1ac9-e59e-26e95f367c8e" [ 920.171018] env[69994]: _type = "Task" [ 920.171018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.180403] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e8e1c1-e239-1ac9-e59e-26e95f367c8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.200720] env[69994]: DEBUG nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 920.204205] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2aa31054-30c7-4b43-9174-f3a91f5769b9 tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.205502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.242s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.205668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.207840] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.617s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.208107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d 
tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.210091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.466s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.210329] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.212194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.624s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.212434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.214177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.577s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.214418] env[69994]: DEBUG nova.objects.instance [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 920.244170] env[69994]: INFO nova.scheduler.client.report [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleted allocations for instance 7ef329a2-4d61-428a-8a43-f309a1e953d6 [ 920.247568] env[69994]: INFO nova.scheduler.client.report [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Deleted allocations for instance 4dbf53e0-caa1-41f4-8376-dfba8d8567cd [ 920.262750] env[69994]: INFO nova.scheduler.client.report [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d 
tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Deleted allocations for instance f3268fe1-768c-4d27-828a-5885ce166f90 [ 920.264386] env[69994]: INFO nova.scheduler.client.report [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted allocations for instance 0b975ce0-40a4-48a9-a046-66227636d496 [ 920.579538] env[69994]: DEBUG oslo_concurrency.lockutils [req-605dea85-a826-4317-b783-6f04dbb107f0 req-af0f15f1-5ad4-442d-8d98-5f35c71170a1 service nova] Releasing lock "refresh_cache-80705dfe-4768-4f35-8acf-316b15814f78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.579939] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "refresh_cache-80705dfe-4768-4f35-8acf-316b15814f78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.580123] env[69994]: DEBUG nova.network.neutron [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.597646] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 84bff4c0-9e2e-47f2-a378-70d3c992b58b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.652288] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task} progress is 98%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.682909] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e8e1c1-e239-1ac9-e59e-26e95f367c8e, 'name': SearchDatastore_Task, 'duration_secs': 0.01918} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.683758] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-447e1e73-c2ca-4e06-815c-975ae9fa611a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.691027] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 920.691027] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5278ed56-f206-d288-21a2-6cf78f64192b" [ 920.691027] env[69994]: _type = "Task" [ 920.691027] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.701833] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5278ed56-f206-d288-21a2-6cf78f64192b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.756839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b190e198-efbf-4a92-a3fe-4632d3f3c407 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "7ef329a2-4d61-428a-8a43-f309a1e953d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 37.302s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.758202] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc04a935-34f6-4cea-bdb4-6c613f642321 tempest-ListServerFiltersTestJSON-30304636 tempest-ListServerFiltersTestJSON-30304636-project-member] Lock "4dbf53e0-caa1-41f4-8376-dfba8d8567cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 21.941s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.774125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20cc1e2c-f898-41fd-9a1f-b5444a25609d tempest-ServersListShow2100Test-1567668183 tempest-ServersListShow2100Test-1567668183-project-member] Lock "f3268fe1-768c-4d27-828a-5885ce166f90" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 36.416s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.775370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e40d5822-d8fd-466e-887a-1cc961f8d90f tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "0b975ce0-40a4-48a9-a046-66227636d496" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 28.786s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.104855] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ad957c30-c923-4bbf-8841-00e99de44781] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.136839] env[69994]: DEBUG nova.network.neutron [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.153170] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242076, 'name': RelocateVM_Task, 'duration_secs': 4.411274} completed successfully.
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.153434] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 921.153632] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647906', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'name': 'volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c14851d2-66c5-4865-ae66-abbe303f0c31', 'attached_at': '', 'detached_at': '', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'serial': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 921.154416] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be27f92-a656-480e-ae39-3ab1ca65e184 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.174771] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794eb496-f0b5-4396-80e3-4ab43d7db023 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.201278] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41/volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.204804] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0877d94e-19c4-4e79-9bab-e950d08ac055 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.223107] env[69994]: DEBUG nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 921.226639] env[69994]: DEBUG oslo_concurrency.lockutils [None req-42b296e9-5f71-4e65-b6a0-1dd0ae4338e7 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.232640] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.806s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.232640] env[69994]: DEBUG nova.objects.instance [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lazy-loading 'resources' on Instance uuid b003b7c2-e754-440e-8a65-13c5e9c68cd5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.233608] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5278ed56-f206-d288-21a2-6cf78f64192b, 'name': SearchDatastore_Task, 'duration_secs': 0.011927} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.236028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.236028] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ef37ce64-2c26-4080-899a-6d9dbb5850c9/ef37ce64-2c26-4080-899a-6d9dbb5850c9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 921.236028] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 921.236028] env[69994]: value = "task-3242080" [ 921.236028] env[69994]: _type = "Task" [ 921.236028] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.236329] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9f93754-7ff8-44af-bd0d-fe75fab975f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.249433] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242080, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.253056] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 921.253056] env[69994]: value = "task-3242081" [ 921.253056] env[69994]: _type = "Task" [ 921.253056] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.263906] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 921.263906] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.264362] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 921.264362] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.264362] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 921.264569] env[69994]: DEBUG 
nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 921.264736] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 921.264942] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 921.265140] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 921.265311] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 921.265512] env[69994]: DEBUG nova.virt.hardware [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 921.266864] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b9c74c-d2eb-4e4b-b4d6-355f6156b432 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.276615] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.283118] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4de615-52e8-4278-85b7-970513bebb69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.303802] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.308904] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Creating folder: Project (1b449f7d5cdf479eac4e3839141b8f2b). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.309343] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d71dc39-c778-49a0-8668-89b8a53b19c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.322708] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Created folder: Project (1b449f7d5cdf479eac4e3839141b8f2b) in parent group-v647729. [ 921.323129] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Creating folder: Instances. Parent ref: group-v647935. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 921.323516] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a527b53-4ecd-4ea2-9793-1f667cf1d756 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.336938] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Created folder: Instances in parent group-v647935. [ 921.337416] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.337719] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.338017] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8447239-3261-403a-9f04-9edd4147e691 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.360856] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.360856] env[69994]: value = "task-3242084" [ 921.360856] env[69994]: _type = "Task" [ 921.360856] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.372793] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242084, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.415324] env[69994]: DEBUG nova.network.neutron [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Updating instance_info_cache with network_info: [{"id": "ea997e7f-79b4-40ab-bd44-2882bc9ec8fa", "address": "fa:16:3e:4a:cd:4c", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea997e7f-79", "ovs_interfaceid": "ea997e7f-79b4-40ab-bd44-2882bc9ec8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.611674] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 1d548f54-4ffa-4299-9212-717350558ad4] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.766130] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242080, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.771277] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242081, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502175} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.774650] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ef37ce64-2c26-4080-899a-6d9dbb5850c9/ef37ce64-2c26-4080-899a-6d9dbb5850c9.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 921.774650] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 921.775192] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d79fb11f-0d10-49e9-9d4a-0f0c779ae1e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.784026] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 921.784026] env[69994]: value = "task-3242085" [ 921.784026] env[69994]: _type = "Task" [ 921.784026] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.797665] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242085, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.881507] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242084, 'name': CreateVM_Task, 'duration_secs': 0.386232} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.885477] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 921.889198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.889415] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.893018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 921.893018] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16622da9-86f8-4b16-9e6e-d58ac84ad82e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.898014] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 921.898014] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bfdc02-f58e-9795-4757-ac8c33dfa80d" [ 921.898014] env[69994]: _type = "Task" [ 921.898014] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.910710] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bfdc02-f58e-9795-4757-ac8c33dfa80d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.919032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "refresh_cache-80705dfe-4768-4f35-8acf-316b15814f78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.919115] env[69994]: DEBUG nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Instance network_info: |[{"id": "ea997e7f-79b4-40ab-bd44-2882bc9ec8fa", "address": "fa:16:3e:4a:cd:4c", "network": {"id": "e0a34d39-6f06-4258-9bc2-7b70aa37964a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-455935249-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16b66dfea80140689fa05c54842cdf96", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea997e7f-79", "ovs_interfaceid": "ea997e7f-79b4-40ab-bd44-2882bc9ec8fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 921.919503] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:cd:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7a73c01-1bb9-4612-a1a7-16d71b732e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea997e7f-79b4-40ab-bd44-2882bc9ec8fa', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.927668] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.927896] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.928507] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc34ca56-59a7-42b5-a8a6-9728f88416bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.952638] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.952638] env[69994]: value = "task-3242086" [ 921.952638] env[69994]: _type = "Task" [ 921.952638] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.963841] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242086, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.118548] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: aeb7928a-8307-49e7-b019-a4c674e6369a] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.154037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a1661f-3de6-4ae2-8c0f-4e8a189c6d09 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.165419] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff321529-26c9-4485-b450-9edd069a7e83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.206162] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb204666-be0f-41c3-a3b9-332a89ab11d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.215784] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c936b12-5914-4d94-bd95-dce4cbe931b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.231216] env[69994]: DEBUG nova.compute.provider_tree [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.252141] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242080, 'name': ReconfigVM_Task, 'duration_secs': 0.618385} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.253430] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Reconfigured VM instance instance-00000045 to attach disk [datastore1] volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41/volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.257412] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1e72fb7-5bf5-4f0f-acc5-34928cda329b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.275261] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 922.275261] env[69994]: value = "task-3242087" [ 922.275261] env[69994]: _type = "Task" [ 922.275261] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.288687] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.296467] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078381} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.296751] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.297570] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a4419d-730d-4920-933d-8a12fb020693 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.322978] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] ef37ce64-2c26-4080-899a-6d9dbb5850c9/ef37ce64-2c26-4080-899a-6d9dbb5850c9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.324833] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62a0ca22-23c7-4aa2-9df6-7ee5501d7807 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.351723] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 922.351723] env[69994]: value = "task-3242088" [ 922.351723] env[69994]: _type = "Task" [ 922.351723] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.362415] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.409949] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bfdc02-f58e-9795-4757-ac8c33dfa80d, 'name': SearchDatastore_Task, 'duration_secs': 0.027719} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.409949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.410179] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.410438] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.410539] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.410710] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.411024] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68ebeda7-909a-46b8-b1d8-6d10cf7c9c5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.422729] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.422992] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.423896] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90e3a975-fa20-433d-a2de-50fd75ce872b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.431185] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 922.431185] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e8c020-727f-cb17-5466-42bdbbb9919f" [ 922.431185] env[69994]: _type = "Task" [ 922.431185] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.440484] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e8c020-727f-cb17-5466-42bdbbb9919f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.464756] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242086, 'name': CreateVM_Task, 'duration_secs': 0.390702} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.465033] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 922.465848] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.465990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.466342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 922.466659] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30296c37-73e0-4818-a2b4-88addaaf5e65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.472893] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 
tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 922.472893] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b65e6c-4128-71e7-2a98-a28029f3abac" [ 922.472893] env[69994]: _type = "Task" [ 922.472893] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.485158] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b65e6c-4128-71e7-2a98-a28029f3abac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.622563] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: c47c26c8-3f7f-436b-95aa-0bd08d41e62b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.734973] env[69994]: DEBUG nova.scheduler.client.report [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.786499] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242087, 'name': ReconfigVM_Task, 'duration_secs': 0.166838} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.786808] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647906', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'name': 'volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c14851d2-66c5-4865-ae66-abbe303f0c31', 'attached_at': '', 'detached_at': '', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'serial': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 922.787771] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-938ef07e-87fb-4c56-a49e-ef0e5b5b93a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.795433] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 922.795433] env[69994]: value = "task-3242089" [ 922.795433] env[69994]: _type = "Task" [ 922.795433] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.803422] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242089, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.861793] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242088, 'name': ReconfigVM_Task, 'duration_secs': 0.311845} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.862089] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Reconfigured VM instance instance-00000046 to attach disk [datastore1] ef37ce64-2c26-4080-899a-6d9dbb5850c9/ef37ce64-2c26-4080-899a-6d9dbb5850c9.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.862709] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e89db3ca-1c8e-49fb-91b0-084c001906b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.870125] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 922.870125] env[69994]: value = "task-3242090" [ 922.870125] env[69994]: _type = "Task" [ 922.870125] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.878667] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242090, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.943315] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e8c020-727f-cb17-5466-42bdbbb9919f, 'name': SearchDatastore_Task, 'duration_secs': 0.011302} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.944378] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9de59a54-eebb-460a-a098-7d42f40a69c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.950522] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 922.950522] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525614e4-7005-ab94-d1c3-f815c244ac03" [ 922.950522] env[69994]: _type = "Task" [ 922.950522] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.961455] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525614e4-7005-ab94-d1c3-f815c244ac03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.984495] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b65e6c-4128-71e7-2a98-a28029f3abac, 'name': SearchDatastore_Task, 'duration_secs': 0.012404} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.984840] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.985152] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.985418] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.127352] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 5badecfd-5784-4968-8519-419a01c67465] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 923.240954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.243589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.257s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.244285] env[69994]: DEBUG nova.objects.instance [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lazy-loading 'resources' on Instance uuid e3697388-4598-4dde-8c20-43fc7665083b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.264530] env[69994]: INFO nova.scheduler.client.report [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 
tempest-VolumesAdminNegativeTest-1388262934-project-member] Deleted allocations for instance b003b7c2-e754-440e-8a65-13c5e9c68cd5 [ 923.305562] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242089, 'name': Rename_Task, 'duration_secs': 0.146474} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.307591] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 923.307591] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f99b49ae-b9fe-4f27-8ffb-119e10541c9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.313177] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 923.313177] env[69994]: value = "task-3242091" [ 923.313177] env[69994]: _type = "Task" [ 923.313177] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.321561] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.380586] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242090, 'name': Rename_Task, 'duration_secs': 0.166163} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.380860] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 923.381125] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-513b5e5e-e1b8-49d4-a3e3-540efdbca3db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.388316] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 923.388316] env[69994]: value = "task-3242092" [ 923.388316] env[69994]: _type = "Task" [ 923.388316] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.398614] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.463901] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525614e4-7005-ab94-d1c3-f815c244ac03, 'name': SearchDatastore_Task, 'duration_secs': 0.010233} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.464200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.464462] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 923.464751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.464941] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.465204] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-618676c4-9f16-4fde-990f-b8266b0c22b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.467628] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e7f97e9-4d9d-43fa-a4ae-5aa90bd0bb68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.476168] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 923.476168] env[69994]: value = "task-3242093" [ 
923.476168] env[69994]: _type = "Task" [ 923.476168] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.477743] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.478012] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.482152] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17b7ea51-85e1-44dd-9661-69b0c62f17e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.492486] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 923.492486] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52300d52-8741-565f-a3d1-7903bed1f000" [ 923.492486] env[69994]: _type = "Task" [ 923.492486] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.492757] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242093, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.502402] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52300d52-8741-565f-a3d1-7903bed1f000, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.581400] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.581557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.631895] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 91bb882c-7b84-450f-bd03-91ea1ce739ce] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 923.774645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f1aa446-0ae9-453c-bc74-b671935b11e0 tempest-VolumesAdminNegativeTest-1388262934 tempest-VolumesAdminNegativeTest-1388262934-project-member] Lock "b003b7c2-e754-440e-8a65-13c5e9c68cd5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.185s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.827726] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242091, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.905708] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242092, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.987970] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242093, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469681} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.990869] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.992031] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.992031] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52cad930-2bd6-4193-a710-7e7be0d88d68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.003268] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52300d52-8741-565f-a3d1-7903bed1f000, 'name': SearchDatastore_Task, 'duration_secs': 0.013188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.008106] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 924.008106] env[69994]: value = "task-3242094" [ 924.008106] env[69994]: _type = "Task" [ 924.008106] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.008551] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10916da0-52a0-40cd-be9d-8ca62c8e6207 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.019730] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242094, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.023392] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 924.023392] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524fa107-907d-8496-3d97-fb1ec95daba6" [ 924.023392] env[69994]: _type = "Task" [ 924.023392] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.037094] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524fa107-907d-8496-3d97-fb1ec95daba6, 'name': SearchDatastore_Task, 'duration_secs': 0.015597} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.040344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.040616] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 80705dfe-4768-4f35-8acf-316b15814f78/80705dfe-4768-4f35-8acf-316b15814f78.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.042155] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f7dc316-d6e0-4138-942a-cd403dee8c21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.052748] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 924.052748] env[69994]: value = "task-3242095" [ 924.052748] env[69994]: _type = "Task" [ 924.052748] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.067588] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.085776] env[69994]: DEBUG nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 924.137820] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 7e7953f7-ed5d-4515-9181-93d343ad772d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 924.141267] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883412fe-79f0-4adb-b051-c7be86260260 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.150662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab77b648-83fa-4aca-a951-56ea3e210fdf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.186549] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d588eaa7-5542-4f2b-a233-df3d88a6500e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.197624] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc67a30-1d3b-4228-915b-1338c46b8802 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.212327] env[69994]: DEBUG nova.compute.provider_tree [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.324023] env[69994]: DEBUG oslo_vmware.api [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242091, 'name': PowerOnVM_Task, 'duration_secs': 0.698395} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.324300] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 924.324505] env[69994]: INFO nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Took 9.02 seconds to spawn the instance on the hypervisor. 
[ 924.324685] env[69994]: DEBUG nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 924.325469] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2b9fd0-ef4a-40ab-a1a1-011ffaa21643 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.402020] env[69994]: DEBUG oslo_vmware.api [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242092, 'name': PowerOnVM_Task, 'duration_secs': 0.716942} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.402020] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 924.402020] env[69994]: INFO nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Took 8.30 seconds to spawn the instance on the hypervisor. [ 924.402020] env[69994]: DEBUG nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 924.402020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583de403-f94e-44af-bd37-4079ea75df77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.522315] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219153} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.522617] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.523420] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e514b2e0-410f-41f4-8e95-2c88523eed1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.544322] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.544619] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53db2e89-c493-4afd-a13a-a449a3f4c5de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.571159] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242095, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.572776] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 924.572776] env[69994]: value = "task-3242096" [ 924.572776] env[69994]: _type = "Task" [ 924.572776] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.584165] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242096, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.617306] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.646036] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: f109c803-bf37-4845-8956-4336dbc8a946] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 924.717066] env[69994]: DEBUG nova.scheduler.client.report [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.844516] env[69994]: INFO nova.compute.manager [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Took 56.85 seconds to build instance. [ 924.924196] env[69994]: INFO nova.compute.manager [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Took 51.94 seconds to build instance. [ 925.076377] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.816637} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.078283] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 80705dfe-4768-4f35-8acf-316b15814f78/80705dfe-4768-4f35-8acf-316b15814f78.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.078514] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.078789] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de5eb4e9-94b5-46ec-8e2f-27c64879279d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.086421] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242096, 'name': ReconfigVM_Task, 'duration_secs': 0.355516} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.087635] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.088451] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 925.088451] env[69994]: value = "task-3242097" [ 925.088451] env[69994]: _type = "Task" [ 925.088451] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.088757] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df821107-e41b-4bfd-9772-09b3671c0a42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.099926] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242097, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.101727] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 925.101727] env[69994]: value = "task-3242098" [ 925.101727] env[69994]: _type = "Task" [ 925.101727] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.112230] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242098, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.150998] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 21f66039-6292-4d9c-b97d-668d029def24] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 925.225072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.980s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.225877] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 16.471s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.246020] env[69994]: INFO nova.scheduler.client.report [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Deleted allocations for instance e3697388-4598-4dde-8c20-43fc7665083b [ 925.346578] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8a571310-c7cb-4b4e-af7a-0f6254f568c7 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.356s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.427197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-163de4f5-9d15-4f7c-b2db-d7da06c06874 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.457s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.606620] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': 
task-3242097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079468} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.613129] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.613341] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059de125-704a-468a-803d-436e80934127 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.627026] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242098, 'name': Rename_Task, 'duration_secs': 0.152827} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.635208] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.646500] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 80705dfe-4768-4f35-8acf-316b15814f78/80705dfe-4768-4f35-8acf-316b15814f78.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.649632] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f50f6a4-f7a6-4e96-84dd-fc1949ff833f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.649632] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cfd6890-4ee9-4f8b-9444-9a6bfe38215d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.666963] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 2f710439-0216-401e-9759-af584f9bd00d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 925.675267] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 925.675267] env[69994]: value = "task-3242100" [ 925.675267] env[69994]: _type = "Task" [ 925.675267] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.679022] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 925.679022] env[69994]: value = "task-3242099" [ 925.679022] env[69994]: _type = "Task" [ 925.679022] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.691345] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242100, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.698210] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242099, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.756643] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1b9acb53-e17c-4b6a-ba36-5ff3042416d2 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "e3697388-4598-4dde-8c20-43fc7665083b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.313s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.097142] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9332b57-04c0-4fba-83b3-1126569e8be4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.104711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3edaef6-4179-42e7-8938-82dbd039cc01 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.140767] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00932898-0de4-4c57-95d3-bdbcc793f976 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.150094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc08f7c-e116-43cd-a62e-ddde6275407e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.171018] env[69994]: DEBUG nova.compute.provider_tree [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.173573] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 53a8714c-50f7-4990-a3d9-86f8fc908d03] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 926.191053] env[69994]: 
DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242100, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.193871] env[69994]: DEBUG oslo_vmware.api [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242099, 'name': PowerOnVM_Task, 'duration_secs': 0.433852} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.194362] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 926.194573] env[69994]: INFO nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Took 4.97 seconds to spawn the instance on the hypervisor. [ 926.194753] env[69994]: DEBUG nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 926.195780] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5593e32a-6a96-4021-886e-3d003e523965 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.677227] env[69994]: DEBUG nova.scheduler.client.report [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.682551] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 84efe900-1d79-42f9-b3c6-54299757cdbc] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 926.697115] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242100, 'name': ReconfigVM_Task, 'duration_secs': 0.80916} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.698067] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 80705dfe-4768-4f35-8acf-316b15814f78/80705dfe-4768-4f35-8acf-316b15814f78.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.698701] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ac91e6d-bf5d-467c-8890-7eea220dca14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.711300] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 926.711300] env[69994]: value = "task-3242101" [ 926.711300] env[69994]: _type = "Task" [ 926.711300] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.715425] env[69994]: INFO nova.compute.manager [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Took 45.44 seconds to build instance. [ 926.725168] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242101, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.192028] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 6ca85dc6-ace9-4c5e-a11e-a3d5060d766b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 927.218632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5db79f78-3905-4276-874f-ec6c01311e17 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "17389887-5463-44e1-b1c0-f123d8dedec7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.955s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.226050] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242101, 'name': Rename_Task, 'duration_secs': 0.183888} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.226574] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.226985] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b5290ce-1d4b-44c7-94a5-10f4009225a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.236702] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 927.236702] env[69994]: value = "task-3242102" [ 927.236702] env[69994]: _type = "Task" [ 927.236702] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.244010] env[69994]: DEBUG nova.compute.manager [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Received event network-changed-58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.244257] env[69994]: DEBUG nova.compute.manager [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Refreshing instance network info cache due to event network-changed-58a0ef78-0177-4996-ba8f-adbf83a9c0e6. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 927.244473] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] Acquiring lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.244612] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] Acquired lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.244788] env[69994]: DEBUG nova.network.neutron [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Refreshing network info cache for port 58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.253267] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242102, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.696520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.470s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.696768] env[69994]: DEBUG nova.compute.manager [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=69994) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 927.701564] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e9bc15f9-e957-487f-b8d5-d1332b185dcf] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 927.704258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.309s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.704258] env[69994]: DEBUG nova.objects.instance [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'resources' on Instance uuid 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.754925] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242102, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.055416] env[69994]: DEBUG nova.network.neutron [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updated VIF entry in instance network info cache for port 58a0ef78-0177-4996-ba8f-adbf83a9c0e6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.055827] env[69994]: DEBUG nova.network.neutron [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updating instance_info_cache with network_info: [{"id": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "address": "fa:16:3e:db:90:66", "network": {"id": "61112f44-6155-452d-9ba0-0800f3ec9a9e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2056121607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aacff9947d8149e084d2b17f07f3d24c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "572b7281-aad3-45fa-9cb2-fc1c70569948", "external-id": "nsx-vlan-transportzone-722", "segmentation_id": 722, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a0ef78-01", "ovs_interfaceid": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.208884] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 15d17772-ac57-49a3-b261-bf49b902f658] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 928.263988] env[69994]: DEBUG oslo_vmware.api [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242102, 'name': PowerOnVM_Task, 'duration_secs': 0.96642} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.263988] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.263988] env[69994]: INFO nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Took 9.59 seconds to spawn the instance on the hypervisor. 
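The instance_info_cache entry logged above is stored as a list of VIF dicts with nested network/subnet/ip structures. A minimal sketch of walking that structure to pull out each port's fixed and floating addresses; the network_info literal below is a placeholder that mirrors the shape of the cached blob in the log, not the full entry:

    # Shape mirrors the cache entry above: one dict per VIF.
    network_info = [
        {"id": "58a0ef78-0177-4996-ba8f-adbf83a9c0e6",
         "network": {"subnets": [
             {"ips": [{"address": "192.168.128.8",
                       "floating_ips": [{"address": "10.180.180.227"}]}]}]}},
    ]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                # Floating IPs are optional per fixed IP.
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], ip["address"], floats)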
[ 928.264118] env[69994]: DEBUG nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.264924] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aac56d-aa66-4327-b2c4-0a06c7b80c15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.299314] env[69994]: INFO nova.scheduler.client.report [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted allocation for migration a470e5c8-07de-40fb-a4de-5addff23af5a [ 928.562151] env[69994]: DEBUG oslo_concurrency.lockutils [req-8c851913-7c49-4cea-be22-343243bda765 req-f68e9a14-a9ec-44a0-ab64-3983c2b234d4 service nova] Releasing lock "refresh_cache-c14851d2-66c5-4865-ae66-abbe303f0c31" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.592143] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857a1a40-a4ce-4e33-99c0-8c499d859493 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.599903] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3c67d7-7f4a-4316-8d34-e86fe0633e09 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.633619] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1429461b-9086-48d8-ba20-8ccafa56a389 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.642546] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8854803c-67a4-4d49-aeae-44e600fbad81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.659228] env[69994]: DEBUG nova.compute.provider_tree [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.721576] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 2244e8ad-75f6-42bc-a97d-7f26eaba1aa2] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 928.788016] env[69994]: INFO nova.compute.manager [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Took 48.00 seconds to build instance. 
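The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: a *_Task method returns a task reference immediately, and wait_for_task blocks on it, emitting the periodic "progress is N%" lines seen in this log. A minimal sketch of that pattern against the public oslo.vmware API, assuming placeholder vCenter host, credentials and moref values that are not taken from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details (real values come from nova.conf [vmware]).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.invalid', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder managed-object reference for the VM being powered on.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # *_Task calls return right away with a task reference ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... and wait_for_task polls it to completion, raising if the task errors.
    task_info = session.wait_for_task(task)
    print(task_info.state)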
[ 928.814953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fac2e267-f136-4920-831d-408d8be6e17f tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 22.946s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.164112] env[69994]: DEBUG nova.scheduler.client.report [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.228120] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 91666839-f440-499e-acf0-07d352e701ab] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 929.233020] env[69994]: DEBUG nova.objects.instance [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'flavor' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.292242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-91fa46a5-1f07-413f-b014-de3d1816ea4d tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "80705dfe-4768-4f35-8acf-316b15814f78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.969s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.361395] env[69994]: DEBUG nova.compute.manager [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Received event network-changed-75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.361703] env[69994]: DEBUG nova.compute.manager [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Refreshing instance network info cache due to event network-changed-75af8d87-ecba-45ba-867a-8c8e9c0389c9. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 929.362020] env[69994]: DEBUG oslo_concurrency.lockutils [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] Acquiring lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.362172] env[69994]: DEBUG oslo_concurrency.lockutils [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] Acquired lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.362340] env[69994]: DEBUG nova.network.neutron [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Refreshing network info cache for port 75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.415402] env[69994]: INFO nova.compute.manager [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Rebuilding instance [ 929.465874] env[69994]: DEBUG nova.compute.manager [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.466999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73edeb40-c372-4a92-bc45-105257ac78ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.673218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.676551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.339s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.676844] env[69994]: DEBUG nova.objects.instance [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lazy-loading 'resources' on Instance uuid c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.698522] env[69994]: INFO nova.scheduler.client.report [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Deleted allocations for instance 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24 [ 929.727582] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "80705dfe-4768-4f35-8acf-316b15814f78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.727897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "80705dfe-4768-4f35-8acf-316b15814f78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.729045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "80705dfe-4768-4f35-8acf-316b15814f78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.729045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "80705dfe-4768-4f35-8acf-316b15814f78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.729045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "80705dfe-4768-4f35-8acf-316b15814f78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.730538] env[69994]: INFO nova.compute.manager [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Terminating instance [ 929.735788] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: f3945280-ee10-426b-bcab-3e52e8779c55] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 929.744024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.744024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" 
{{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.744024] env[69994]: DEBUG nova.network.neutron [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.744024] env[69994]: DEBUG nova.objects.instance [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'info_cache' on Instance uuid 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.063318] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.063572] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.063779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.063953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.064139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.068518] env[69994]: INFO nova.compute.manager [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Terminating instance [ 930.207734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29070d91-5318-4a32-a07c-45df93d1a66c tempest-AttachVolumeTestJSON-729346381 
tempest-AttachVolumeTestJSON-729346381-project-member] Lock "7f66a148-86fe-4ddc-b8ed-6e6a306bbc24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.915s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.234393] env[69994]: DEBUG nova.network.neutron [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updated VIF entry in instance network info cache for port 75af8d87-ecba-45ba-867a-8c8e9c0389c9. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.234770] env[69994]: DEBUG nova.network.neutron [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updating instance_info_cache with network_info: [{"id": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "address": "fa:16:3e:5c:3b:72", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75af8d87-ec", "ovs_interfaceid": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.237754] env[69994]: DEBUG nova.compute.manager [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.237957] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.244305] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa0826e-2ccf-4fd5-a5d9-6caa53d37621 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.248659] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 2e374549-00a2-4014-90e0-ceccbe4360fa] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 930.251582] env[69994]: DEBUG nova.objects.base [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Object Instance<45a8dced-6c49-441c-92e2-ee323ed8753c> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 930.261970] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.264364] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9cbe3913-cf09-46c9-9a63-b9a836909a88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.273036] env[69994]: DEBUG oslo_vmware.api [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 930.273036] env[69994]: value = "task-3242103" [ 930.273036] env[69994]: _type = "Task" [ 930.273036] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.283323] env[69994]: DEBUG oslo_vmware.api [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242103, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.486472] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.487178] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0f67dc7-3ace-47eb-b041-c423247a7e97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.496022] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 930.496022] env[69994]: value = "task-3242104" [ 930.496022] env[69994]: _type = "Task" [ 930.496022] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.509485] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.564445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e8ae66-ed45-4310-bf78-f0b84ab04818 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.573191] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c411b663-0f77-407f-aef0-7dd569de9234 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.577254] env[69994]: DEBUG nova.compute.manager [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.578074] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.578210] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e76efd-953a-4860-b48e-dfb89d4e70ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.618314] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.619415] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfcddeaa-7d02-433b-8518-c1fd417c8998 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.621983] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bc091f-2e80-4770-a252-8482b7e6c5d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.633385] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b982b62-d719-4bbf-8923-307904b45dbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.637745] env[69994]: DEBUG oslo_vmware.api [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 930.637745] env[69994]: value = "task-3242105" [ 930.637745] env[69994]: _type = "Task" [ 930.637745] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.654332] env[69994]: DEBUG nova.compute.provider_tree [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.662623] env[69994]: DEBUG oslo_vmware.api [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242105, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.744245] env[69994]: DEBUG oslo_concurrency.lockutils [req-9be9741e-f16a-459e-b6ca-72108246d66a req-632c7c7b-ebca-4bbe-8c21-efeff6f40b54 service nova] Releasing lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.756103] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.756264] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Cleaning up deleted instances with incomplete migration {{(pid=69994) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 930.784173] env[69994]: DEBUG oslo_vmware.api [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242103, 'name': PowerOffVM_Task, 'duration_secs': 0.222894} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.784442] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.784604] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 930.788030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9122ac5-bbee-49d7-a572-7d6d6e4a2d20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.874300] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 930.874554] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 930.874751] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleting the datastore file [datastore1] 80705dfe-4768-4f35-8acf-316b15814f78 {{(pid=69994) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.875081] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6128a0f-897b-44f2-b089-90e751f484eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.883888] env[69994]: DEBUG oslo_vmware.api [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for the task: (returnval){ [ 930.883888] env[69994]: value = "task-3242107" [ 930.883888] env[69994]: _type = "Task" [ 930.883888] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.895775] env[69994]: DEBUG oslo_vmware.api [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.990668] env[69994]: DEBUG nova.network.neutron [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [{"id": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "address": "fa:16:3e:a4:62:49", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003af7d4-a8", "ovs_interfaceid": "003af7d4-a8a5-43d4-b032-96df0b4ae173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.011527] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242104, 'name': PowerOffVM_Task, 'duration_secs': 0.143597} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.011814] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.012479] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 931.013359] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f558c08e-56d9-445c-85ac-e7e1da0f5f02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.022111] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.023570] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e236293f-0bf0-4dba-9c4a-4892d95d4685 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.055432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.055561] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.055701] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Deleting the datastore file [datastore1] 17389887-5463-44e1-b1c0-f123d8dedec7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.055973] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3afe59b3-70e1-406d-82e6-c3f0d088d7df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.062861] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 931.062861] env[69994]: value = "task-3242109" [ 931.062861] env[69994]: _type = "Task" [ 931.062861] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.073762] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242109, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.148669] env[69994]: DEBUG oslo_vmware.api [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242105, 'name': PowerOffVM_Task, 'duration_secs': 0.192823} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.148943] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.149210] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.149437] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca853aff-0dfd-4913-8507-09b862870255 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.159306] env[69994]: DEBUG nova.scheduler.client.report [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.242173] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.242528] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.242803] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 
tempest-SecurityGroupsTestJSON-920339006-project-member] Deleting the datastore file [datastore2] 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.243017] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50b21677-175e-4e8a-91ee-e07c37ed32f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.252071] env[69994]: DEBUG oslo_vmware.api [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for the task: (returnval){ [ 931.252071] env[69994]: value = "task-3242111" [ 931.252071] env[69994]: _type = "Task" [ 931.252071] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.258370] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.268292] env[69994]: DEBUG oslo_vmware.api [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.397739] env[69994]: DEBUG oslo_vmware.api [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Task: {'id': task-3242107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.398454] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.398790] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.399103] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.399655] env[69994]: INFO nova.compute.manager [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Took 1.16 seconds to destroy the instance on the hypervisor. 
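The entries around task-3242103 and task-3242107 show the destroy sequence the driver runs per instance: power off, unregister the VM from inventory, then delete its directory from the datastore. A rough sketch of the same sequence using oslo.vmware primitives rather than Nova's vmops/ds_util helpers; the session, vm_ref, dc_ref and datastore path are assumed placeholders:

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # Power the VM off first; a real caller checks the current power
        # state, since PowerOffVM_Task fails on an already-off VM.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM removes the VM from vCenter inventory but leaves its
        # files behind; it is a plain call, not a task.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the leftover directory, e.g. '[datastore1] <instance-uuid>'.
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)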
[ 931.400051] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.400512] env[69994]: DEBUG nova.compute.manager [-] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 931.400943] env[69994]: DEBUG nova.network.neutron [-] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.494084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-45a8dced-6c49-441c-92e2-ee323ed8753c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.578265] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105424} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.578998] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.581118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.581118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.665303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.668422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.636s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.668930] env[69994]: DEBUG nova.objects.instance [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 931.704476] env[69994]: INFO nova.scheduler.client.report [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Deleted allocations for instance c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc [ 931.763613] env[69994]: DEBUG oslo_vmware.api [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Task: {'id': task-3242111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150865} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.764691] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.765025] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.765706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.766059] env[69994]: INFO nova.compute.manager [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Took 1.19 seconds to destroy the instance on the hypervisor. [ 931.766435] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.767323] env[69994]: DEBUG nova.compute.manager [-] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 931.767527] env[69994]: DEBUG nova.network.neutron [-] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.802906] env[69994]: DEBUG nova.compute.manager [req-20fed48e-b9ac-49ce-a687-13d06e7aca54 req-3d52e00b-8faf-40e5-b7bd-4957dc2a6e5f service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Received event network-vif-deleted-ea997e7f-79b4-40ab-bd44-2882bc9ec8fa {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 931.802906] env[69994]: INFO nova.compute.manager [req-20fed48e-b9ac-49ce-a687-13d06e7aca54 req-3d52e00b-8faf-40e5-b7bd-4957dc2a6e5f service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Neutron deleted interface ea997e7f-79b4-40ab-bd44-2882bc9ec8fa; detaching it from the instance and deleting it from the info cache [ 931.802906] env[69994]: DEBUG nova.network.neutron [req-20fed48e-b9ac-49ce-a687-13d06e7aca54 req-3d52e00b-8faf-40e5-b7bd-4957dc2a6e5f service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.217581] env[69994]: DEBUG nova.network.neutron [-] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.218862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0fde11a6-4db5-4b29-9b79-731336ca5aaa tempest-InstanceActionsV221TestJSON-1977546144 tempest-InstanceActionsV221TestJSON-1977546144-project-member] Lock "c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.686s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.308449] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04f8331e-ee4c-43a3-98ca-e6afa12ca809 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.328515] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21814efc-522a-40f8-b9b6-b0f41e007093 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.375847] env[69994]: DEBUG nova.compute.manager [req-20fed48e-b9ac-49ce-a687-13d06e7aca54 req-3d52e00b-8faf-40e5-b7bd-4957dc2a6e5f service nova] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Detach interface failed, port_id=ea997e7f-79b4-40ab-bd44-2882bc9ec8fa, reason: Instance 80705dfe-4768-4f35-8acf-316b15814f78 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 932.506373] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.506771] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fc781b5-a13c-4bad-837a-82d1685986c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.515500] env[69994]: DEBUG oslo_vmware.api [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 932.515500] env[69994]: value = "task-3242112" [ 932.515500] env[69994]: _type = "Task" [ 932.515500] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.528690] env[69994]: DEBUG oslo_vmware.api [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242112, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 932.622016] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 932.623530] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 932.623881] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 932.624247] env[69994]: DEBUG nova.virt.hardware [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 932.625705] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fddac2-e39f-4b73-b730-4891dd944371 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.638063] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0708a6-1885-4fd2-8ceb-9aacea6debfb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.653628] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.663016] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 
tempest-ServersListShow298Test-1168383887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.663346] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.663570] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22be9390-419a-48c9-9c53-3efe716103f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.683910] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba4e5b8a-4002-4176-90ae-a78a49bbee6b tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.687134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.027s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.687134] env[69994]: DEBUG nova.objects.instance [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lazy-loading 'resources' on Instance uuid 4ca53416-caed-418c-bb40-cabb8e311803 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.688078] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.688078] env[69994]: value = "task-3242113" [ 932.688078] env[69994]: _type = "Task" [ 932.688078] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.698261] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242113, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.724939] env[69994]: INFO nova.compute.manager [-] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Took 1.32 seconds to deallocate network for instance. [ 932.819652] env[69994]: DEBUG nova.network.neutron [-] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.027391] env[69994]: DEBUG oslo_vmware.api [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242112, 'name': PowerOnVM_Task, 'duration_secs': 0.443047} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.027836] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.028415] env[69994]: DEBUG nova.compute.manager [None req-97c859cb-8c47-42e2-8090-bf313e4b6e88 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.029702] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fdaedd-69a7-453c-a102-303b9ce047a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.208251] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242113, 'name': CreateVM_Task, 'duration_secs': 0.350624} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.211212] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.212254] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.212674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.215518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 933.215518] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b64ad68-6cd2-44e3-a22e-4b203f3f195e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.221465] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 933.221465] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521d3624-5df5-3c0e-14c7-de4df0eec589" [ 933.221465] env[69994]: _type = "Task" [ 933.221465] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.232099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.236496] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521d3624-5df5-3c0e-14c7-de4df0eec589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.324007] env[69994]: INFO nova.compute.manager [-] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Took 1.56 seconds to deallocate network for instance. [ 933.617225] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18dadef4-43bd-4ec4-a7cc-50b46c1c94e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.626405] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fa6342-ccbe-495c-9a2c-825f0beaf8f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.670343] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef7cd8f-3bff-4ffb-a6f5-6a0962c55642 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.678115] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "4b3addd0-22b0-4793-af75-dba381c4a83f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.678424] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.685336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507921cf-0847-4ace-950a-d6722018c58a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.705088] env[69994]: DEBUG nova.compute.provider_tree [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.735625] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521d3624-5df5-3c0e-14c7-de4df0eec589, 'name': SearchDatastore_Task, 'duration_secs': 0.017002} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.735625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.735625] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.735625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.735625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.735944] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.736304] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64416155-8237-443d-9fcf-124b4d36f751 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.753353] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.753528] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.754683] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fcdcd3f-aeb7-4888-aa48-09b25d74bbbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.761471] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 933.761471] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524899b6-38a0-796e-5b2e-abc2218f4b80" [ 933.761471] env[69994]: _type = "Task" [ 933.761471] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.772451] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524899b6-38a0-796e-5b2e-abc2218f4b80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.833241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.939386] env[69994]: DEBUG nova.compute.manager [req-e9372787-41ea-4e3c-a49a-c42b39c0c58d req-800d38b9-3f95-4888-8c2c-2a6004b63e7a service nova] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Received event network-vif-deleted-58907f85-0b65-4837-9d8e-da4ed1cf1be6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 934.039487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.039487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.183635] env[69994]: DEBUG nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 934.228031] env[69994]: ERROR nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [req-825ccf59-8e1c-48fd-933b-9c0b2a4de482] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-825ccf59-8e1c-48fd-933b-9c0b2a4de482"}]} [ 934.244928] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 934.261703] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 934.261930] env[69994]: DEBUG nova.compute.provider_tree [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.274347] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524899b6-38a0-796e-5b2e-abc2218f4b80, 'name': SearchDatastore_Task, 'duration_secs': 0.019605} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.275252] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 934.278017] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b294d7a-0997-43a9-bf3b-ffa8bc80d048 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.280430] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "45a8dced-6c49-441c-92e2-ee323ed8753c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.280670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.280894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.281076] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.281213] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.283532] env[69994]: INFO nova.compute.manager [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Terminating instance [ 934.286965] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 
tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 934.286965] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52383101-ea2f-8be9-5c49-a13ba84b73f7" [ 934.286965] env[69994]: _type = "Task" [ 934.286965] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.295466] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52383101-ea2f-8be9-5c49-a13ba84b73f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.296356] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 934.540800] env[69994]: DEBUG nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 934.617231] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab8d9f8-e7b8-45b2-bc6b-94a45ac438d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.626971] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938a501f-8a05-4702-af47-009fe604a18e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.675072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bf0a4d-861d-4eda-9509-28f6bb20faca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.685711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13923e4a-d8f5-4837-83eb-cf3f35342197 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.703221] env[69994]: DEBUG nova.compute.provider_tree [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.709263] 
env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.788986] env[69994]: DEBUG nova.compute.manager [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 934.789345] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 934.790517] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f181b5-1e71-4f31-aa75-1075b80f561f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.802625] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52383101-ea2f-8be9-5c49-a13ba84b73f7, 'name': SearchDatastore_Task, 'duration_secs': 0.014899} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.804703] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.804986] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.805320] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 934.805535] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa65ac16-69df-4ad3-822d-8162c2f563ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.807820] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4bd60ed-5dfa-4d55-ac0f-dc092e08a7b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.814270] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 934.814270] env[69994]: value = "task-3242115" [ 934.814270] env[69994]: _type = "Task" [ 934.814270] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.815617] env[69994]: DEBUG oslo_vmware.api [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 934.815617] env[69994]: value = "task-3242114" [ 934.815617] env[69994]: _type = "Task" [ 934.815617] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.828232] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242115, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.831831] env[69994]: DEBUG oslo_vmware.api [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242114, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.065239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.228284] env[69994]: ERROR nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] [req-f6bb3a44-0be1-4e89-b0d7-9b9f1c35f11b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f6bb3a44-0be1-4e89-b0d7-9b9f1c35f11b"}]} [ 935.246460] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 935.263077] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 935.263330] env[69994]: DEBUG nova.compute.provider_tree [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 935.276754] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 935.296303] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 935.334432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "ee7e0c02-ef19-4475-a936-f591c8185797" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.334432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "ee7e0c02-ef19-4475-a936-f591c8185797" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.335399] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242115, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474316} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.338956] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.339183] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.339696] env[69994]: DEBUG oslo_vmware.api [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242114, 'name': PowerOffVM_Task, 'duration_secs': 0.197178} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.339897] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb9fdaba-034f-45f5-b923-61dba485cefc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.341778] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 935.341954] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 935.343113] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c35b1a64-d115-47a7-8552-ce27a6d43ccf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.355757] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 935.355757] env[69994]: value = "task-3242116" [ 935.355757] env[69994]: _type = "Task" [ 935.355757] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.364479] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242116, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.426356] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.426672] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.427017] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleting the datastore file [datastore1] 45a8dced-6c49-441c-92e2-ee323ed8753c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.427367] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0198679f-fe25-4f2c-ab86-4048f82386d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.437286] env[69994]: DEBUG oslo_vmware.api [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 935.437286] env[69994]: value = "task-3242118" [ 935.437286] env[69994]: _type = "Task" [ 935.437286] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.446808] env[69994]: DEBUG oslo_vmware.api [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242118, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.627562] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd384bae-122d-49ca-adf5-3b92aae0babf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.635453] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b211fe23-44ae-4318-9a1f-98d3482617dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.666931] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5e6735-1ff7-47dc-8224-10d3e532ae4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.675400] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0175d27-5b56-4061-a85f-c29d085ec873 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.689089] env[69994]: DEBUG nova.compute.provider_tree [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.841111] env[69994]: DEBUG nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 935.866154] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242116, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078605} completed successfully. 
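The recurring pattern in the entries above (Invoking <Something>_Task, then "Waiting for the task", periodic "progress is N%" polls from _poll_task, and finally "completed successfully" with a duration) is oslo.vmware's task handling. Below is a minimal sketch of that cycle, assuming a reachable vCenter; the endpoint, credentials, and VM moref are placeholders and this is not Nova's actual code path.

# Hedged illustration of the invoke -> wait_for_task -> poll cycle seen above.
# Endpoint, credentials and the moref value are placeholders, not values from
# this deployment.
from oslo_vmware import api, vim_util

# Constructing the session connects and logs in to the vCenter endpoint.
session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',            # placeholder vCenter/creds
    api_retry_count=10, task_poll_interval=0.5)     # cadence of the progress polls

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')   # placeholder moref

# invoke_api() issues the SOAP call (the "Invoking ..._Task" lines) and returns
# a Task managed object reference.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task() polls the task (the "progress is N%" lines) and returns its
# TaskInfo once the task succeeds, or raises on error.
task_info = session.wait_for_task(task)
print(task_info.state)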
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.866433] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.867274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc34c60-743e-4619-afda-8fccd5bd5ab1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.889011] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.890030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf93aebf-1073-4796-b240-d63212ba721e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.911813] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 935.911813] env[69994]: value = "task-3242119" [ 935.911813] env[69994]: _type = "Task" [ 935.911813] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.920955] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242119, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.949867] env[69994]: DEBUG oslo_vmware.api [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152895} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.950301] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.950604] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 935.950889] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 935.951211] env[69994]: INFO nova.compute.manager [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 935.951607] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 935.951908] env[69994]: DEBUG nova.compute.manager [-] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 935.952189] env[69994]: DEBUG nova.network.neutron [-] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.192664] env[69994]: DEBUG nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.358448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.423444] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242119, 'name': ReconfigVM_Task, 'duration_secs': 0.344192} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.423755] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 17389887-5463-44e1-b1c0-f123d8dedec7/17389887-5463-44e1-b1c0-f123d8dedec7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.424617] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65d13e1e-c15f-4cdd-9dce-3939b7659a5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.432876] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 936.432876] env[69994]: value = "task-3242120" [ 936.432876] env[69994]: _type = "Task" [ 936.432876] env[69994]: } to complete. 
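The inventory data reported above for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 lists, per resource class, the fields Placement uses to size the provider. A small worked example of the usual capacity formula, capacity = (total - reserved) * allocation_ratio, applied to those values; the formula is standard Placement sizing, while the helper function and printout are only illustrative.

# Effective capacity per resource class from the inventory record above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # (total - reserved) scaled by the overcommit ratio.
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

for rc, inv in inventory.items():
    print(rc, capacity(inv))
# VCPU 192, MEMORY_MB 196078, DISK_GB 400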
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.442905] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242120, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.687333] env[69994]: DEBUG nova.compute.manager [req-a34de7a5-7262-461d-9575-110c92ec9903 req-32fad6dc-c58b-4a7b-b812-45760d77a6a7 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Received event network-vif-deleted-003af7d4-a8a5-43d4-b032-96df0b4ae173 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 936.687566] env[69994]: INFO nova.compute.manager [req-a34de7a5-7262-461d-9575-110c92ec9903 req-32fad6dc-c58b-4a7b-b812-45760d77a6a7 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Neutron deleted interface 003af7d4-a8a5-43d4-b032-96df0b4ae173; detaching it from the instance and deleting it from the info cache [ 936.687741] env[69994]: DEBUG nova.network.neutron [req-a34de7a5-7262-461d-9575-110c92ec9903 req-32fad6dc-c58b-4a7b-b812-45760d77a6a7 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.697687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.011s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.700187] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.987s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.700992] env[69994]: DEBUG nova.objects.instance [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lazy-loading 'resources' on Instance uuid ff645ae7-940e-4842-8915-a96d36d08067 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.724192] env[69994]: INFO nova.scheduler.client.report [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Deleted allocations for instance 4ca53416-caed-418c-bb40-cabb8e311803 [ 936.948646] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242120, 'name': Rename_Task, 'duration_secs': 0.149664} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.948920] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.949169] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07ab1ba6-1baf-4549-9e1d-720272fd18e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.957473] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 936.957473] env[69994]: value = "task-3242121" [ 936.957473] env[69994]: _type = "Task" [ 936.957473] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.966076] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.137046] env[69994]: DEBUG nova.network.neutron [-] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.191467] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dcaa89da-9917-49bb-954e-ff166ab20bb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.203691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fea23fc-a6b2-4847-9ccf-afb5d55134a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.234330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ceb070dd-aab3-4992-b7c3-2478b7e2e619 tempest-ServerMetadataTestJSON-231785587 tempest-ServerMetadataTestJSON-231785587-project-member] Lock "4ca53416-caed-418c-bb40-cabb8e311803" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.964s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.253106] env[69994]: DEBUG nova.compute.manager [req-a34de7a5-7262-461d-9575-110c92ec9903 req-32fad6dc-c58b-4a7b-b812-45760d77a6a7 service nova] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Detach interface failed, port_id=003af7d4-a8a5-43d4-b032-96df0b4ae173, reason: Instance 45a8dced-6c49-441c-92e2-ee323ed8753c could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 937.443382] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.443664] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.443845] env[69994]: INFO nova.compute.manager [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Shelving [ 937.468953] env[69994]: DEBUG oslo_vmware.api [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242121, 'name': PowerOnVM_Task, 'duration_secs': 0.424522} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.472190] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.472357] env[69994]: DEBUG nova.compute.manager [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.473933] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8a8146-f961-4c3b-95b6-55d92f81984b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.543918] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73aabff6-403a-43b3-a9f2-7c9803468240 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.552197] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5277631-06f5-44ce-99c2-6cfd6b7b0540 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.585020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a13312-2ea9-4e96-b54b-26ba8963c391 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.593453] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-40909bb6-af82-476a-b77f-b20f75637b4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.607143] env[69994]: DEBUG nova.compute.provider_tree [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.640135] env[69994]: INFO nova.compute.manager [-] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Took 1.69 seconds to deallocate network for instance. [ 937.993477] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.111690] env[69994]: DEBUG nova.scheduler.client.report [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.147777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.454979] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.455725] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78ffbbfd-8d3d-4b81-8383-f06337ac13a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.465518] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 938.465518] env[69994]: value = "task-3242122" [ 938.465518] env[69994]: _type = "Task" [ 938.465518] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.476273] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242122, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.616821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.917s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.620586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.667s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.620867] env[69994]: DEBUG nova.objects.instance [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lazy-loading 'resources' on Instance uuid f07750f5-3f1d-4d97-98dc-285ed357cc7e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.656283] env[69994]: INFO nova.scheduler.client.report [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted allocations for instance ff645ae7-940e-4842-8915-a96d36d08067 [ 938.911554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "17389887-5463-44e1-b1c0-f123d8dedec7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.911971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "17389887-5463-44e1-b1c0-f123d8dedec7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.912226] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "17389887-5463-44e1-b1c0-f123d8dedec7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.912454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock 
"17389887-5463-44e1-b1c0-f123d8dedec7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.912790] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "17389887-5463-44e1-b1c0-f123d8dedec7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.915052] env[69994]: INFO nova.compute.manager [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Terminating instance [ 938.978489] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242122, 'name': PowerOffVM_Task, 'duration_secs': 0.269758} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.978818] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.979941] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa00f53e-4fca-4486-ae94-012c0d73f73e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.001124] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841c2ed1-4e24-4e91-8960-06961450a149 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.165369] env[69994]: DEBUG oslo_concurrency.lockutils [None req-286de424-ef7a-47bd-b88c-8b3a6202c4a6 tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "ff645ae7-940e-4842-8915-a96d36d08067" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.823s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.403994] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b8d875-9a6b-4e46-a688-986408e0a9c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.412340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa0e5b1-48a9-49b9-b5ad-42e8ba8f4ece {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.418551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock 
"refresh_cache-17389887-5463-44e1-b1c0-f123d8dedec7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.418754] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquired lock "refresh_cache-17389887-5463-44e1-b1c0-f123d8dedec7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.418975] env[69994]: DEBUG nova.network.neutron [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.447699] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b54d750-941b-4058-bddb-e4ac1b1706c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.456630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c38a095-1654-431c-a3ea-d7daa1469cd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.473485] env[69994]: DEBUG nova.compute.provider_tree [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.513686] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 939.513818] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-031a6c46-d057-426f-80b4-c8373cec9a92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.525312] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 939.525312] env[69994]: value = "task-3242123" [ 939.525312] env[69994]: _type = "Task" [ 939.525312] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.535726] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242123, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.965107] env[69994]: DEBUG nova.network.neutron [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 939.978992] env[69994]: DEBUG nova.scheduler.client.report [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.982931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "566522b0-7aa7-4552-9be7-035d742ba394" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.983239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "566522b0-7aa7-4552-9be7-035d742ba394" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.983473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "566522b0-7aa7-4552-9be7-035d742ba394-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.983681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "566522b0-7aa7-4552-9be7-035d742ba394-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.983871] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "566522b0-7aa7-4552-9be7-035d742ba394-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.986380] env[69994]: INFO 
nova.compute.manager [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Terminating instance [ 940.019028] env[69994]: DEBUG nova.network.neutron [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.036440] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242123, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.096734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "a4544bc9-6935-4825-9b45-2054d2ced330" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.096734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "a4544bc9-6935-4825-9b45-2054d2ced330" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.097039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "a4544bc9-6935-4825-9b45-2054d2ced330-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.097183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "a4544bc9-6935-4825-9b45-2054d2ced330-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.097437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "a4544bc9-6935-4825-9b45-2054d2ced330-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.100236] env[69994]: INFO nova.compute.manager [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 
tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Terminating instance [ 940.487407] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.490107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.873s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.491699] env[69994]: INFO nova.compute.claims [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 940.494508] env[69994]: DEBUG nova.compute.manager [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 940.494720] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.495772] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d2d6df-ce5d-4ac0-948b-238167d04e36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.504642] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.504906] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f13b2140-dd14-48b5-9d83-08817f8df5b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.512979] env[69994]: DEBUG oslo_vmware.api [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 940.512979] env[69994]: value = "task-3242124" [ 940.512979] env[69994]: _type = "Task" [ 940.512979] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.516903] env[69994]: INFO nova.scheduler.client.report [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Deleted allocations for instance f07750f5-3f1d-4d97-98dc-285ed357cc7e [ 940.523905] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Releasing lock "refresh_cache-17389887-5463-44e1-b1c0-f123d8dedec7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.524417] env[69994]: DEBUG nova.compute.manager [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 940.524566] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.524847] env[69994]: DEBUG oslo_vmware.api [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.525827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3e57e4-ead7-41f2-b0ee-781fd5ea437e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.536756] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.539945] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-007adaa7-7ce4-4e08-9165-5bc1fd4afe18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.541770] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242123, 'name': CreateSnapshot_Task, 'duration_secs': 0.755997} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.542076] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 940.543067] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69918c6c-20cc-48e7-ab2c-214218364de3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.548696] env[69994]: DEBUG oslo_vmware.api [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 940.548696] env[69994]: value = "task-3242125" [ 940.548696] env[69994]: _type = "Task" [ 940.548696] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.562848] env[69994]: DEBUG oslo_vmware.api [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.604339] env[69994]: DEBUG nova.compute.manager [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 940.604625] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.605605] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669d5682-f03a-4398-89d9-d66099f7fea0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.615448] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.615721] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48ce10e5-2af1-4fdf-b07f-014ef3f53991 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.624342] env[69994]: DEBUG oslo_vmware.api [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 940.624342] env[69994]: value = "task-3242126" [ 940.624342] env[69994]: _type = "Task" [ 940.624342] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.634453] env[69994]: DEBUG oslo_vmware.api [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3242126, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.024364] env[69994]: DEBUG oslo_vmware.api [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242124, 'name': PowerOffVM_Task, 'duration_secs': 0.290556} completed successfully. 
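Each "Terminating instance" sequence above follows the same order of vSphere calls: power off, unregister, then delete the instance directory from the datastore before the network is deallocated. A hedged outline of that order against an oslo.vmware session as in the earlier snippet; the refs and datastore path are placeholders, and error handling and the optional datacenter argument are omitted. This is not Nova's vmops implementation, only the call order visible in the log.

# Outline of the power-off / unregister / datastore-delete order shown above.
def destroy_sequence(session, vm_ref, ds_path):
    vim = session.vim
    # "Powering off the VM" -> PowerOffVM_Task
    session.wait_for_task(
        session.invoke_api(vim, 'PowerOffVM_Task', vm_ref))
    # "Unregistering the VM" -> UnregisterVM (not a task, returns immediately)
    session.invoke_api(vim, 'UnregisterVM', vm_ref)
    # "Deleting the datastore file [datastoreN] <uuid>" -> DeleteDatastoreFile_Task
    file_manager = vim.service_content.fileManager
    task = session.invoke_api(vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path)
    session.wait_for_task(task)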
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.024873] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.025197] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.027596] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5c771ad-86c8-4f26-8848-27d078812e99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.029612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-37d94b9c-e15f-4db7-b5e7-3b0ab480463b tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "f07750f5-3f1d-4d97-98dc-285ed357cc7e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.676s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.067066] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 941.067066] env[69994]: DEBUG oslo_vmware.api [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242125, 'name': PowerOffVM_Task, 'duration_secs': 0.118583} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.067066] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e4a8c1a7-6669-4fb8-a53c-d3a642d66776 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.069786] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.070128] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.070468] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3e50d42-c804-4fa1-9375-243d690e0fef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.078642] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 941.078642] env[69994]: value = "task-3242129" [ 941.078642] env[69994]: _type = "Task" [ 941.078642] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.087568] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242129, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.103698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.103698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.103698] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Deleting the datastore file [datastore2] 17389887-5463-44e1-b1c0-f123d8dedec7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.103698] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec8aff60-e555-4011-8c3a-6ce2e2d087f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.106175] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.106379] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.106552] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleting the datastore file [datastore1] 566522b0-7aa7-4552-9be7-035d742ba394 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.107221] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d74951f-0445-46cc-8b03-55f7c9e1c6a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.111981] env[69994]: DEBUG oslo_vmware.api [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for the task: (returnval){ [ 941.111981] env[69994]: value = "task-3242130" [ 941.111981] env[69994]: _type = "Task" [ 941.111981] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.116647] env[69994]: DEBUG oslo_vmware.api [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 941.116647] env[69994]: value = "task-3242131" [ 941.116647] env[69994]: _type = "Task" [ 941.116647] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.122951] env[69994]: DEBUG oslo_vmware.api [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242130, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.127906] env[69994]: DEBUG oslo_vmware.api [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.135664] env[69994]: DEBUG oslo_vmware.api [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3242126, 'name': PowerOffVM_Task, 'duration_secs': 0.219597} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.135910] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.136092] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.136338] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2bb86ea-2346-41d8-8add-20a9be8915cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.217366] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.217762] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
941.218070] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Deleting the datastore file [datastore1] a4544bc9-6935-4825-9b45-2054d2ced330 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.218524] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b8219f6-1296-4ae6-ad48-301df3cf6266 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.227892] env[69994]: DEBUG oslo_vmware.api [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for the task: (returnval){ [ 941.227892] env[69994]: value = "task-3242133" [ 941.227892] env[69994]: _type = "Task" [ 941.227892] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.236844] env[69994]: DEBUG oslo_vmware.api [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3242133, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.590798] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242129, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.628296] env[69994]: DEBUG oslo_vmware.api [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Task: {'id': task-3242130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110941} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.635373] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.635587] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.635769] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.635947] env[69994]: INFO nova.compute.manager [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 941.636214] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.636434] env[69994]: DEBUG oslo_vmware.api [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171193} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.636917] env[69994]: DEBUG nova.compute.manager [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.637173] env[69994]: DEBUG nova.network.neutron [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.638981] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.639181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.639434] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.639590] env[69994]: INFO nova.compute.manager [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Took 1.14 seconds to destroy the instance on the hypervisor. [ 941.639833] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.640103] env[69994]: DEBUG nova.compute.manager [-] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.640206] env[69994]: DEBUG nova.network.neutron [-] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.663296] env[69994]: DEBUG nova.network.neutron [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.739701] env[69994]: DEBUG oslo_vmware.api [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Task: {'id': task-3242133, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199068} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.742476] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.742677] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.742855] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.743047] env[69994]: INFO nova.compute.manager [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Took 1.14 seconds to destroy the instance on the hypervisor. [ 941.743310] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.743975] env[69994]: DEBUG nova.compute.manager [-] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.744092] env[69994]: DEBUG nova.network.neutron [-] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.814220] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e273c2-9eb2-4d57-b796-0ea13e98f3b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.824157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cd65a3-7eeb-415e-bc7e-facd10cbf464 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.855711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f697c5-cf52-43f7-94ce-bfd1f9a8b03f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.866745] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbd067d-6220-4aee-8c48-476e02e4b171 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.880727] env[69994]: DEBUG nova.compute.provider_tree [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 941.888818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.889080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.889285] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.889486] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.889668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.892433] env[69994]: INFO nova.compute.manager [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Terminating instance [ 941.967566] env[69994]: DEBUG nova.compute.manager [req-db7233f9-3aea-4133-b96d-af42c9e070ae req-0d41a50b-413d-4d2f-9f79-c5aa6f2730f5 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Received event network-vif-deleted-b02b6f7a-67ae-46ce-aaa8-77cd472b8714 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 941.967566] env[69994]: INFO nova.compute.manager [req-db7233f9-3aea-4133-b96d-af42c9e070ae req-0d41a50b-413d-4d2f-9f79-c5aa6f2730f5 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Neutron deleted interface b02b6f7a-67ae-46ce-aaa8-77cd472b8714; detaching it from the instance and deleting it from the info cache [ 941.967566] env[69994]: DEBUG nova.network.neutron [req-db7233f9-3aea-4133-b96d-af42c9e070ae req-0d41a50b-413d-4d2f-9f79-c5aa6f2730f5 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.092446] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242129, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.165301] env[69994]: DEBUG nova.network.neutron [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.198896] env[69994]: DEBUG nova.compute.manager [req-44b9f461-5f46-4f6a-908d-c791c6a50ab6 req-979eca2e-8234-42f6-8b84-c21f8528ce75 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Received event network-vif-deleted-7f7c4dd0-5c90-4dd1-8113-b871712bb2f7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.198896] env[69994]: INFO nova.compute.manager [req-44b9f461-5f46-4f6a-908d-c791c6a50ab6 req-979eca2e-8234-42f6-8b84-c21f8528ce75 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Neutron deleted interface 7f7c4dd0-5c90-4dd1-8113-b871712bb2f7; detaching it from the instance and deleting it from the info cache [ 942.198896] env[69994]: DEBUG nova.network.neutron [req-44b9f461-5f46-4f6a-908d-c791c6a50ab6 req-979eca2e-8234-42f6-8b84-c21f8528ce75 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.396235] env[69994]: DEBUG nova.compute.manager [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 942.399261] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 942.399261] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12ddd9a-0feb-44be-94ed-b4d517385471 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.405867] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 942.407107] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6052d6c3-11eb-4a5d-a9d9-ffe249ec7b10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.413241] env[69994]: DEBUG oslo_vmware.api [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 942.413241] env[69994]: value = "task-3242134" [ 942.413241] env[69994]: _type = "Task" [ 942.413241] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.417518] env[69994]: DEBUG nova.scheduler.client.report [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 942.417910] env[69994]: DEBUG nova.compute.provider_tree [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 97 to 98 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 942.418228] env[69994]: DEBUG nova.compute.provider_tree [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 942.428018] env[69994]: DEBUG oslo_vmware.api [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242134, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.446642] env[69994]: DEBUG nova.network.neutron [-] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.470460] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-361e460a-186e-417f-940a-0b9cc1984196 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.481444] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9c1d14-c232-40f3-8efd-6e9a1908ab90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.516691] env[69994]: DEBUG nova.compute.manager [req-db7233f9-3aea-4133-b96d-af42c9e070ae req-0d41a50b-413d-4d2f-9f79-c5aa6f2730f5 service nova] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Detach interface failed, port_id=b02b6f7a-67ae-46ce-aaa8-77cd472b8714, reason: Instance 566522b0-7aa7-4552-9be7-035d742ba394 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 942.595625] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242129, 'name': CloneVM_Task, 'duration_secs': 1.369063} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.596050] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Created linked-clone VM from snapshot [ 942.596616] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ef0cde-10d7-4bd4-b7d9-0af1748a737d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.605648] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Uploading image 18590b17-addb-4605-8ce4-cb732b6f48da {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 942.633431] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 942.633431] env[69994]: value = "vm-647941" [ 942.633431] env[69994]: _type = "VirtualMachine" [ 942.633431] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 942.633704] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-45de7082-ef50-4126-86de-805f24826b0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.641105] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lease: (returnval){ [ 942.641105] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52098cdb-b5c7-a4a5-a52a-987cad10e8a9" [ 942.641105] env[69994]: _type = "HttpNfcLease" [ 942.641105] env[69994]: } obtained for exporting VM: (result){ [ 942.641105] env[69994]: value = "vm-647941" [ 942.641105] env[69994]: _type = "VirtualMachine" [ 942.641105] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 942.641431] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the lease: (returnval){ [ 942.641431] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52098cdb-b5c7-a4a5-a52a-987cad10e8a9" [ 942.641431] env[69994]: _type = "HttpNfcLease" [ 942.641431] env[69994]: } to be ready. 
{{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 942.648518] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 942.648518] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52098cdb-b5c7-a4a5-a52a-987cad10e8a9" [ 942.648518] env[69994]: _type = "HttpNfcLease" [ 942.648518] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 942.668198] env[69994]: INFO nova.compute.manager [-] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Took 1.03 seconds to deallocate network for instance. [ 942.682676] env[69994]: DEBUG nova.network.neutron [-] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.700443] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac2fe96c-72cd-4705-9e19-c5783bb76fef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.712653] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa25b72-f10f-4d85-aba8-12a9cc73498b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.753261] env[69994]: DEBUG nova.compute.manager [req-44b9f461-5f46-4f6a-908d-c791c6a50ab6 req-979eca2e-8234-42f6-8b84-c21f8528ce75 service nova] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Detach interface failed, port_id=7f7c4dd0-5c90-4dd1-8113-b871712bb2f7, reason: Instance a4544bc9-6935-4825-9b45-2054d2ced330 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 942.926242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.926808] env[69994]: DEBUG nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 942.929306] env[69994]: DEBUG oslo_vmware.api [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242134, 'name': PowerOffVM_Task, 'duration_secs': 0.247201} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.929944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.698s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.930052] env[69994]: DEBUG nova.objects.instance [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lazy-loading 'resources' on Instance uuid 80705dfe-4768-4f35-8acf-316b15814f78 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.930957] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.932045] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.932045] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fca7fb8b-6992-45e0-a6f3-1ba20d1b0421 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.949227] env[69994]: INFO nova.compute.manager [-] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Took 1.31 seconds to deallocate network for instance. 
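The Task handles and wait_for_task/progress entries interleaved above (CloneVM_Task, UnregisterVM, DeleteDatastoreFile_Task, PowerOffVM_Task) all follow the same poll-until-complete pattern: invoke the vCenter method, receive a Task reference, poll it while logging "progress is N%.", and log "completed successfully" with the duration once it finishes. A minimal sketch of such a loop, assuming a hypothetical session object with a get_task_info() helper rather than the real oslo_vmware.api internals:

import time

def wait_for_task(session, task_ref, poll_interval=0.5):
    # Simplified illustration of the poll loop behind the log entries above;
    # session.get_task_info() is an assumed helper, not an oslo_vmware API.
    start = time.monotonic()
    while True:
        info = session.get_task_info(task_ref)
        if info.state in ('queued', 'running'):
            print("Task: {'id': %s, 'name': %s} progress is %s%%."
                  % (task_ref.value, info.name, info.progress or 0))
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if info.state == 'success':
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                  "completed successfully." % (task_ref.value, info.name, duration))
            return info.result
        # any other terminal state is treated as a failure with the vCenter fault
        raise RuntimeError('Task %s failed after %.2fs: %s'
                           % (task_ref.value, duration, info.error))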
[ 943.003433] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 943.003705] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 943.003968] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Deleting the datastore file [datastore1] dbad6bed-64ba-4dfd-abad-c0b2c775ba2c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 943.004360] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7f64ace-df93-476d-9801-19d6775377c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.011337] env[69994]: DEBUG oslo_vmware.api [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for the task: (returnval){ [ 943.011337] env[69994]: value = "task-3242137" [ 943.011337] env[69994]: _type = "Task" [ 943.011337] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.021130] env[69994]: DEBUG oslo_vmware.api [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242137, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.151977] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 943.151977] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52098cdb-b5c7-a4a5-a52a-987cad10e8a9" [ 943.151977] env[69994]: _type = "HttpNfcLease" [ 943.151977] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 943.152307] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 943.152307] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52098cdb-b5c7-a4a5-a52a-987cad10e8a9" [ 943.152307] env[69994]: _type = "HttpNfcLease" [ 943.152307] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 943.153041] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdca5fa-d671-454a-9c1b-d2aad57b629b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.160268] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523b9d4d-b12f-8b15-734a-b5d32cf7fe00/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 943.160482] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523b9d4d-b12f-8b15-734a-b5d32cf7fe00/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 943.219055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.219504] env[69994]: INFO nova.compute.manager [-] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Took 1.48 seconds to deallocate network for instance. [ 943.252245] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0f206fa7-97d9-46d9-847b-6250c5993540 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.432768] env[69994]: DEBUG nova.compute.utils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 943.437903] env[69994]: DEBUG nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 943.437903] env[69994]: DEBUG nova.network.neutron [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 943.454842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.484685] env[69994]: DEBUG nova.policy [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb1874902bc24959b717674a99e530a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee188ea80c9847188df8b8482b7c6ec7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 943.526032] env[69994]: DEBUG oslo_vmware.api [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Task: {'id': task-3242137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13604} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.526392] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 943.526580] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 943.526793] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 943.526989] env[69994]: INFO nova.compute.manager [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Took 1.13 seconds to destroy the instance on the hypervisor. 
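The linked-clone upload path that starts with CloneVM_Task and ExportVm above ends with an HttpNfcLease whose info carries the disk URL ("Found VMDK URL ... Opening URL ... for reading"), and the periodic HttpNfcLease.HttpNfcLeaseProgress calls keep that lease from expiring while the disk is streamed. A rough sketch of consuming such a lease, assuming a pyVmomi-style lease handle and the requests library; this is illustrative only, not the nova.virt.vmwareapi.images or oslo_vmware.rw_handles code:

import requests  # assumed HTTP client; any streaming client would do

def read_exported_vmdk(lease, vmdk_url, dest_path, chunk_size=1024 * 1024):
    # Stream the exported disk while pinging the lease so vCenter keeps it open.
    # verify=False mirrors a lab vCenter/ESX with self-signed certificates.
    with requests.get(vmdk_url, stream=True, verify=False) as resp, \
            open(dest_path, 'wb') as out:
        resp.raise_for_status()
        for i, chunk in enumerate(resp.iter_content(chunk_size)):
            out.write(chunk)
            if i % 64 == 0:
                # Placeholder percentage; the point is the keep-alive call.
                lease.HttpNfcLeaseProgress(percent=50)
    lease.HttpNfcLeaseComplete()

Note that in the log the VMDK URL points at the ESX host itself (esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/...), so the read goes straight to the hypervisor rather than through the vCenter endpoint.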
[ 943.527265] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 943.527492] env[69994]: DEBUG nova.compute.manager [-] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 943.527588] env[69994]: DEBUG nova.network.neutron [-] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 943.730857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.798094] env[69994]: DEBUG nova.network.neutron [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Successfully created port: b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.805045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c50243-b402-4db8-95b0-077b1a174331 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.815689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd5cac2-f491-4c49-b90d-f94ec062d050 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.852782] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e78c591-4e12-41e5-9680-8550610b03f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.861648] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5bd4b0-5aef-4f1a-b787-f5b936fd9874 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.883355] env[69994]: DEBUG nova.compute.provider_tree [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.941053] env[69994]: DEBUG nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 944.016498] env[69994]: DEBUG nova.compute.manager [req-36a92c8a-2492-49c0-bdbd-e6ed36bf3d3b req-23e44daa-9e28-44df-8a44-1b456e8b5ffa service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Received event network-vif-deleted-1c4ae184-b8b0-409f-aff4-5568af2af1b9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 944.016791] env[69994]: INFO nova.compute.manager [req-36a92c8a-2492-49c0-bdbd-e6ed36bf3d3b req-23e44daa-9e28-44df-8a44-1b456e8b5ffa service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Neutron deleted interface 1c4ae184-b8b0-409f-aff4-5568af2af1b9; detaching it from the instance and deleting it from the info cache [ 944.017016] env[69994]: DEBUG nova.network.neutron [req-36a92c8a-2492-49c0-bdbd-e6ed36bf3d3b req-23e44daa-9e28-44df-8a44-1b456e8b5ffa service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.312025] env[69994]: DEBUG nova.network.neutron [-] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.386959] env[69994]: DEBUG nova.scheduler.client.report [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.519923] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5e4d63d-0623-425b-a483-06bee8ca26b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.531328] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8034b19d-b32a-4d37-8297-804f438f9f92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.567748] env[69994]: DEBUG nova.compute.manager [req-36a92c8a-2492-49c0-bdbd-e6ed36bf3d3b req-23e44daa-9e28-44df-8a44-1b456e8b5ffa service nova] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Detach interface failed, port_id=1c4ae184-b8b0-409f-aff4-5568af2af1b9, reason: Instance dbad6bed-64ba-4dfd-abad-c0b2c775ba2c could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 944.817402] env[69994]: INFO nova.compute.manager [-] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Took 1.29 seconds to deallocate network for instance. 
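The inventory payload repeated in the provider-tree and Placement entries for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 describes raw totals plus allocation ratios, not directly schedulable capacity. Read with the usual Placement convention capacity = (total - reserved) * allocation_ratio, it implies 192 schedulable VCPUs (48 * 4.0), 196078 MB of RAM and 400 GB of disk, with max_unit capping any single allocation. A small sketch that simply evaluates that arithmetic over the dict shown in the log (the helper is illustrative, not Placement code):

# Inventory as logged for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 120,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in INVENTORY.items():
    # Effective capacity assuming capacity = (total - reserved) * allocation_ratio.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%-9s capacity=%-8g max per allocation=%s' % (rc, capacity, inv['max_unit']))

# Expected output:
#   VCPU      capacity=192      max per allocation=16
#   MEMORY_MB capacity=196078   max per allocation=65530
#   DISK_GB   capacity=400      max per allocation=120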
[ 944.892014] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.895117] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.062s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.895117] env[69994]: DEBUG nova.objects.instance [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lazy-loading 'resources' on Instance uuid 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.917477] env[69994]: INFO nova.scheduler.client.report [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Deleted allocations for instance 80705dfe-4768-4f35-8acf-316b15814f78 [ 944.954070] env[69994]: DEBUG nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 944.986535] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.986535] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.986742] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.986892] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.987091] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.987992] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.987992] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.987992] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 944.987992] env[69994]: DEBUG nova.virt.hardware [None 
req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.987992] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.988238] env[69994]: DEBUG nova.virt.hardware [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.989414] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb465e2-3af2-44d7-8aa0-c8eb94dd32a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.998315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3047721c-277f-4f24-bc12-83d9d2d49812 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.217419] env[69994]: DEBUG nova.compute.manager [req-796418c5-fe60-4306-bcc8-b57254831302 req-27a16215-5623-435d-b149-970ecead9f39 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Received event network-vif-plugged-b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 945.217890] env[69994]: DEBUG oslo_concurrency.lockutils [req-796418c5-fe60-4306-bcc8-b57254831302 req-27a16215-5623-435d-b149-970ecead9f39 service nova] Acquiring lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.218411] env[69994]: DEBUG oslo_concurrency.lockutils [req-796418c5-fe60-4306-bcc8-b57254831302 req-27a16215-5623-435d-b149-970ecead9f39 service nova] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.218670] env[69994]: DEBUG oslo_concurrency.lockutils [req-796418c5-fe60-4306-bcc8-b57254831302 req-27a16215-5623-435d-b149-970ecead9f39 service nova] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.219261] env[69994]: DEBUG nova.compute.manager [req-796418c5-fe60-4306-bcc8-b57254831302 req-27a16215-5623-435d-b149-970ecead9f39 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] No waiting events found dispatching network-vif-plugged-b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 945.219261] env[69994]: WARNING nova.compute.manager [req-796418c5-fe60-4306-bcc8-b57254831302 
req-27a16215-5623-435d-b149-970ecead9f39 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Received unexpected event network-vif-plugged-b3052355-2e24-4ec5-9b33-231dad5489a5 for instance with vm_state building and task_state spawning. [ 945.311487] env[69994]: DEBUG nova.network.neutron [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Successfully updated port: b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.324239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.429212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c5e4cb-172c-45f9-8487-0b04e3a163a9 tempest-ImagesOneServerNegativeTestJSON-407635840 tempest-ImagesOneServerNegativeTestJSON-407635840-project-member] Lock "80705dfe-4768-4f35-8acf-316b15814f78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.701s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.648392] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dd7a43-bc63-4454-8074-f9ae92305694 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.656340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4497c54d-e059-497c-b82f-18d317ba0a75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.687639] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520c64ae-7cc5-4dda-aa55-377f72a86c89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.695429] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93e89e5-25b8-4f5a-a9a8-947128b77e3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.708937] env[69994]: DEBUG nova.compute.provider_tree [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.814439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.814635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 
tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.814755] env[69994]: DEBUG nova.network.neutron [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.212642] env[69994]: DEBUG nova.scheduler.client.report [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.381182] env[69994]: DEBUG nova.network.neutron [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.619576] env[69994]: DEBUG nova.network.neutron [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Updating instance_info_cache with network_info: [{"id": "b3052355-2e24-4ec5-9b33-231dad5489a5", "address": "fa:16:3e:be:67:b0", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3052355-2e", "ovs_interfaceid": "b3052355-2e24-4ec5-9b33-231dad5489a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.718927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.824s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.721736] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.012s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.724218] env[69994]: INFO nova.compute.claims [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 946.746232] env[69994]: INFO nova.scheduler.client.report [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Deleted allocations for instance 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf [ 947.122175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.122609] env[69994]: DEBUG nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Instance network_info: |[{"id": "b3052355-2e24-4ec5-9b33-231dad5489a5", "address": "fa:16:3e:be:67:b0", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3052355-2e", "ovs_interfaceid": "b3052355-2e24-4ec5-9b33-231dad5489a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 947.123089] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:67:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3052355-2e24-4ec5-9b33-231dad5489a5', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.132294] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.132682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.132909] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c6144cc-f31a-470c-b48c-e630d187314c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.154504] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.154504] env[69994]: value = "task-3242138" [ 947.154504] env[69994]: _type = "Task" [ 947.154504] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.163680] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242138, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.256177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e6d9089e-8206-49fa-a687-343cf7342d36 tempest-SecurityGroupsTestJSON-920339006 tempest-SecurityGroupsTestJSON-920339006-project-member] Lock "8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.192s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.263182] env[69994]: DEBUG nova.compute.manager [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Received event network-changed-b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.263182] env[69994]: DEBUG nova.compute.manager [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Refreshing instance network info cache due to event network-changed-b3052355-2e24-4ec5-9b33-231dad5489a5. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 947.263182] env[69994]: DEBUG oslo_concurrency.lockutils [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] Acquiring lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.263182] env[69994]: DEBUG oslo_concurrency.lockutils [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] Acquired lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.263182] env[69994]: DEBUG nova.network.neutron [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Refreshing network info cache for port b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 947.438971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Acquiring lock "25a64898-568e-4095-aace-f8a564cdf916" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.439699] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "25a64898-568e-4095-aace-f8a564cdf916" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.439830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Acquiring lock "25a64898-568e-4095-aace-f8a564cdf916-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.441812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "25a64898-568e-4095-aace-f8a564cdf916-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.441812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "25a64898-568e-4095-aace-f8a564cdf916-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.442860] env[69994]: INFO nova.compute.manager [None req-0b695f0e-9064-471b-896e-3bd28329e637 
tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Terminating instance [ 947.666019] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242138, 'name': CreateVM_Task, 'duration_secs': 0.443609} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.666224] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.667536] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.667536] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.667536] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 947.667878] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2a5cfb2-2ecd-4601-a021-1aa7b1aa8d08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.675972] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 947.675972] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52220a11-fe32-ecac-d303-f04d5f8bcb48" [ 947.675972] env[69994]: _type = "Task" [ 947.675972] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.688604] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52220a11-fe32-ecac-d303-f04d5f8bcb48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.949017] env[69994]: DEBUG nova.compute.manager [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 947.949017] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.949017] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80085864-ea99-48c0-9893-1de001f975ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.958330] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 947.958330] env[69994]: value = "task-3242139" [ 947.958330] env[69994]: _type = "Task" [ 947.958330] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.970614] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242139, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.997634] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f140dcd7-f808-4e18-afad-95137b820b76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.006791] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcab3b1-f89e-46c0-ba57-72d3360cca69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.047130] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d70b108-7f1e-47aa-a6c8-5a716d8cc97f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.060786] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47be5bff-309c-4367-bf8c-3d5278dc7362 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.078589] env[69994]: DEBUG nova.compute.provider_tree [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.188515] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52220a11-fe32-ecac-d303-f04d5f8bcb48, 'name': SearchDatastore_Task, 'duration_secs': 0.020967} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.188515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.189266] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.189266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.189417] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.189687] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.190083] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9bb4c31-6b52-4438-8766-05170d82f034 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.202274] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.202674] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.203843] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0948669d-92ec-4303-ae85-cb22c4caf34c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.213425] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 948.213425] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521bafce-00da-6486-9939-8f1a2a21e1a4" [ 948.213425] env[69994]: _type = "Task" [ 948.213425] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.226679] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521bafce-00da-6486-9939-8f1a2a21e1a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.233523] env[69994]: DEBUG nova.network.neutron [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Updated VIF entry in instance network info cache for port b3052355-2e24-4ec5-9b33-231dad5489a5. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 948.233875] env[69994]: DEBUG nova.network.neutron [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Updating instance_info_cache with network_info: [{"id": "b3052355-2e24-4ec5-9b33-231dad5489a5", "address": "fa:16:3e:be:67:b0", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3052355-2e", "ovs_interfaceid": "b3052355-2e24-4ec5-9b33-231dad5489a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.477879] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242139, 'name': PowerOffVM_Task, 'duration_secs': 0.296625} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.477879] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.477879] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 948.477879] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647869', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'name': 'volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '25a64898-568e-4095-aace-f8a564cdf916', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'serial': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 948.478392] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcc6298-1a96-487f-b021-d748e1d32425 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.498346] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9accb09d-c635-4539-973d-ecfd58d5180c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.506789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815131b8-bfb9-4959-8681-b4c5d46ee383 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.528295] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08400c4a-8937-4b74-b860-dae05493e0e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.545637] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] The volume has not been displaced from its original location: [datastore1] volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67/volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 948.551244] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 948.551749] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e15723ae-6d21-48a8-901c-791153805acf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.577175] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 948.577175] env[69994]: value = "task-3242140" [ 948.577175] env[69994]: _type = "Task" [ 948.577175] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.586119] env[69994]: DEBUG nova.scheduler.client.report [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 948.587607] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242140, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.724922] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521bafce-00da-6486-9939-8f1a2a21e1a4, 'name': SearchDatastore_Task, 'duration_secs': 0.01181} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.725744] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e18033d-56dd-41de-9e2c-2281eb432249 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.735238] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 948.735238] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fb6a9f-5d58-7def-adfd-671374efcb00" [ 948.735238] env[69994]: _type = "Task" [ 948.735238] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.737403] env[69994]: DEBUG oslo_concurrency.lockutils [req-d48b47a9-3a6e-41ca-b056-3f01cda376c4 req-b03f6451-16ca-42a4-9bd2-704be891801d service nova] Releasing lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.745457] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fb6a9f-5d58-7def-adfd-671374efcb00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.086456] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242140, 'name': ReconfigVM_Task, 'duration_secs': 0.247456} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.086860] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 949.092520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.093135] env[69994]: DEBUG nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 949.095609] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9467a2e1-cfab-4ba8-b42a-d3b5c238da07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.106224] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.042s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.108330] env[69994]: INFO nova.compute.claims [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.118731] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 949.118731] env[69994]: value = "task-3242141" [ 949.118731] env[69994]: _type = "Task" [ 949.118731] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.128220] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242141, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.246950] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fb6a9f-5d58-7def-adfd-671374efcb00, 'name': SearchDatastore_Task, 'duration_secs': 0.016194} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.247396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.247792] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e638fe4f-5f75-4d38-8a58-15dd66fd9e27/e638fe4f-5f75-4d38-8a58-15dd66fd9e27.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.248132] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0481c337-0742-40db-858e-f6b7cf2a3d91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.256998] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 949.256998] env[69994]: value = "task-3242142" [ 949.256998] env[69994]: _type = "Task" [ 949.256998] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.266036] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242142, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.608606] env[69994]: DEBUG nova.compute.utils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 949.610897] env[69994]: DEBUG nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 949.611215] env[69994]: DEBUG nova.network.neutron [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.634407] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242141, 'name': ReconfigVM_Task, 'duration_secs': 0.245562} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.636204] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647869', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'name': 'volume-b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '25a64898-568e-4095-aace-f8a564cdf916', 'attached_at': '', 'detached_at': '', 'volume_id': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67', 'serial': 'b7a10ba0-24cb-4e9b-a0d8-098524f6ac67'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 949.636204] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.638081] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9520209a-0683-4a6d-a5ad-2f4f7815c046 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.650386] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.650386] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64440e88-7c5c-4040-8e89-0615420cee82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.722960] env[69994]: DEBUG nova.policy [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b512f0a1ffba457b977e472009f59eed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '377f65074c2442588aee091b5165e1cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 949.733708] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.734030] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 
tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.734278] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Deleting the datastore file [datastore1] 25a64898-568e-4095-aace-f8a564cdf916 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.734617] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa2af52c-4adb-4f23-9dfb-3efd44f6c51e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.743637] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for the task: (returnval){ [ 949.743637] env[69994]: value = "task-3242144" [ 949.743637] env[69994]: _type = "Task" [ 949.743637] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.755309] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242144, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.771056] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242142, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.115471] env[69994]: DEBUG nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 950.260436] env[69994]: DEBUG oslo_vmware.api [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Task: {'id': task-3242144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157474} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.263697] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.263971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.265135] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.265709] env[69994]: INFO nova.compute.manager [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Took 2.32 seconds to destroy the instance on the hypervisor. [ 950.265709] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.265872] env[69994]: DEBUG nova.compute.manager [-] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 950.265940] env[69994]: DEBUG nova.network.neutron [-] [instance: 25a64898-568e-4095-aace-f8a564cdf916] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 950.275215] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594653} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.275488] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e638fe4f-5f75-4d38-8a58-15dd66fd9e27/e638fe4f-5f75-4d38-8a58-15dd66fd9e27.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.275706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.275958] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14991b6a-0cfc-43f4-8f19-b04846e25e64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.283990] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 950.283990] env[69994]: value = "task-3242145" [ 950.283990] env[69994]: _type = "Task" [ 950.283990] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.298556] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242145, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.320821] env[69994]: DEBUG nova.network.neutron [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Successfully created port: acadedcd-b660-437d-be25-176371cec4b3 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.449567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea5086b-5d35-4392-aa78-b4736bab01d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.458572] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9daa3bf-d3e2-4797-b1ef-a937eb00654a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.502361] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d5148f-b745-467b-aa28-6f340e53939e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.514664] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713bd90c-48db-4517-b14e-9db87256df7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.533860] env[69994]: DEBUG nova.compute.provider_tree [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.801828] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242145, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085052} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.802275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.803220] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce28e0bd-1dad-4eec-a796-87e3ab4d21cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.830218] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] e638fe4f-5f75-4d38-8a58-15dd66fd9e27/e638fe4f-5f75-4d38-8a58-15dd66fd9e27.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.830604] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11e517f5-d492-4cca-b249-d46dfd1c2aa4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.854074] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 950.854074] env[69994]: value = "task-3242146" [ 950.854074] env[69994]: _type = "Task" [ 950.854074] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.864144] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242146, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.041486] env[69994]: DEBUG nova.scheduler.client.report [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.125383] env[69994]: DEBUG nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 951.163036] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 951.163391] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.163504] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 951.163697] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.163878] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 951.164055] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 951.164287] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 951.164463] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 951.164654] 
env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 951.164835] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 951.165033] env[69994]: DEBUG nova.virt.hardware [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 951.165961] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39decbfe-f8f8-4d5c-aa92-98aa0f19ebfd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.175203] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5842ba6-3f1e-4fde-9b11-5a461c24a233 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.355598] env[69994]: DEBUG nova.compute.manager [req-0fe0bcf4-f932-4c89-9e78-cdda62134dd6 req-e428c76c-1e00-459f-8c72-ba3299125c65 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Received event network-vif-deleted-738bec83-fa63-41a5-899f-73cdd8bec4ba {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.355824] env[69994]: INFO nova.compute.manager [req-0fe0bcf4-f932-4c89-9e78-cdda62134dd6 req-e428c76c-1e00-459f-8c72-ba3299125c65 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Neutron deleted interface 738bec83-fa63-41a5-899f-73cdd8bec4ba; detaching it from the instance and deleting it from the info cache [ 951.356019] env[69994]: DEBUG nova.network.neutron [req-0fe0bcf4-f932-4c89-9e78-cdda62134dd6 req-e428c76c-1e00-459f-8c72-ba3299125c65 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.372705] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242146, 'name': ReconfigVM_Task, 'duration_secs': 0.431338} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.373299] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Reconfigured VM instance instance-00000049 to attach disk [datastore1] e638fe4f-5f75-4d38-8a58-15dd66fd9e27/e638fe4f-5f75-4d38-8a58-15dd66fd9e27.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.376020] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44b714c2-3fa4-4d8f-b68d-90314dc3dfda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.386381] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 951.386381] env[69994]: value = "task-3242147" [ 951.386381] env[69994]: _type = "Task" [ 951.386381] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.390664] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523b9d4d-b12f-8b15-734a-b5d32cf7fe00/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 951.392080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbe36b1-5daf-43d6-b91c-9559b558d28a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.402077] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242147, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.404753] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523b9d4d-b12f-8b15-734a-b5d32cf7fe00/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 951.405094] env[69994]: ERROR oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523b9d4d-b12f-8b15-734a-b5d32cf7fe00/disk-0.vmdk due to incomplete transfer. 
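Editor's note: the ExtendVirtualDisk_Task, ReconfigVM_Task and Rename_Task entries above all follow the same shape: a vSphere call returns a Task moref, and oslo.vmware polls it until it reaches a terminal state, logging "progress is N%" along the way (task-3242145 finishes in ~0.085s, the ReconfigVM and Rename tasks in a few hundred milliseconds). The sketch below is a minimal, illustrative version of that poll loop only; the get_task_info callable, state names and exceptions are assumptions standing in for the real vSphere bindings, not oslo.vmware's actual wait_for_task implementation.

    import time

    # Hypothetical stand-in for a vSphere "get task info" call; in the real
    # driver this goes through the oslo.vmware session, not a plain callable.
    def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300):
        """Poll a vCenter task until it succeeds or errors, mirroring the
        'Task: {...} progress is N%' lines in the log."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)  # dict with 'state', 'progress', 'error'
            state = info.get('state')
            if state == 'success':
                return info.get('result')
            if state == 'error':
                raise RuntimeError('task %s failed: %s' % (task_ref, info.get('error')))
            # 'queued' or 'running': report progress and keep polling.
            print('Task %s progress is %s%%' % (task_ref, info.get('progress', 0)))
            time.sleep(poll_interval)
        raise TimeoutError('task %s did not complete within %ss' % (task_ref, timeout))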
[ 951.405470] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e8536051-a7d0-470b-a42c-48de4027cd21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.417024] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523b9d4d-b12f-8b15-734a-b5d32cf7fe00/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 951.417024] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Uploaded image 18590b17-addb-4605-8ce4-cb732b6f48da to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 951.418986] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 951.419953] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-46abf047-ab5c-4847-83a6-c5ba7569db25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.430705] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 951.430705] env[69994]: value = "task-3242148" [ 951.430705] env[69994]: _type = "Task" [ 951.430705] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.446174] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242148, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.549378] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.553016] env[69994]: DEBUG nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 951.554281] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.196s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.555974] env[69994]: INFO nova.compute.claims [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 951.829575] env[69994]: DEBUG nova.network.neutron [-] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.864962] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-379fbe6b-aaf8-4ddd-8fde-3c0fad9e36f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.874144] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592a278c-5149-4a93-8ca7-28561c9a0fdb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.901023] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242147, 'name': Rename_Task, 'duration_secs': 0.176899} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.901023] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.901023] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fae55871-6ccd-4b33-b827-c808948a37bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.918230] env[69994]: DEBUG nova.compute.manager [req-0fe0bcf4-f932-4c89-9e78-cdda62134dd6 req-e428c76c-1e00-459f-8c72-ba3299125c65 service nova] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Detach interface failed, port_id=738bec83-fa63-41a5-899f-73cdd8bec4ba, reason: Instance 25a64898-568e-4095-aace-f8a564cdf916 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 951.919265] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 951.919265] env[69994]: value = "task-3242149" [ 951.919265] env[69994]: _type = "Task" [ 951.919265] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.929591] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242149, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.942462] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242148, 'name': Destroy_Task, 'duration_secs': 0.39819} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.942925] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Destroyed the VM [ 951.943275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 951.943839] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-365118ef-b999-428d-b311-c56ef466eeb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.952386] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 951.952386] env[69994]: value = "task-3242150" [ 951.952386] env[69994]: _type = "Task" [ 951.952386] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.965791] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242150, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.061636] env[69994]: DEBUG nova.compute.utils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.066310] env[69994]: DEBUG nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.070017] env[69994]: DEBUG nova.network.neutron [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.128325] env[69994]: DEBUG nova.compute.manager [req-27a40b34-d882-4b1d-a0e3-a54fe05b2393 req-c05448c4-3683-443b-8f97-a3df7cbdc64f service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Received event network-vif-plugged-acadedcd-b660-437d-be25-176371cec4b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 952.131849] env[69994]: DEBUG oslo_concurrency.lockutils [req-27a40b34-d882-4b1d-a0e3-a54fe05b2393 req-c05448c4-3683-443b-8f97-a3df7cbdc64f service nova] Acquiring lock "4b3addd0-22b0-4793-af75-dba381c4a83f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.131849] env[69994]: DEBUG oslo_concurrency.lockutils [req-27a40b34-d882-4b1d-a0e3-a54fe05b2393 req-c05448c4-3683-443b-8f97-a3df7cbdc64f service nova] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.131849] env[69994]: DEBUG oslo_concurrency.lockutils [req-27a40b34-d882-4b1d-a0e3-a54fe05b2393 req-c05448c4-3683-443b-8f97-a3df7cbdc64f service nova] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.131849] env[69994]: DEBUG nova.compute.manager [req-27a40b34-d882-4b1d-a0e3-a54fe05b2393 req-c05448c4-3683-443b-8f97-a3df7cbdc64f service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] No waiting events found dispatching network-vif-plugged-acadedcd-b660-437d-be25-176371cec4b3 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 952.131849] env[69994]: WARNING nova.compute.manager [req-27a40b34-d882-4b1d-a0e3-a54fe05b2393 req-c05448c4-3683-443b-8f97-a3df7cbdc64f service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Received unexpected event network-vif-plugged-acadedcd-b660-437d-be25-176371cec4b3 for instance with vm_state building and task_state spawning. 
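Editor's note: the pop_instance_event lines just above show the compute manager matching a Neutron notification (network-vif-plugged-acadedcd-...) against events the build path has registered interest in; because the spawn has not yet reached the point of waiting for that event, nothing is found and the WARNING about an unexpected event is emitted. The sketch below is an illustrative, thread-safe stand-in for that kind of registry under assumed names, not Nova's actual InstanceEvents class.

    import threading

    class InstanceEventRegistry:
        """Illustrative per-instance registry of expected external events."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            """Called by the build path before it starts waiting for an event."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop(self, instance_uuid, event_name):
            """Called when an external event arrives from Neutron; returns the
            waiter, or None if nobody registered interest yet."""
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    registry = InstanceEventRegistry()

    def handle_external_event(instance_uuid, event_name):
        waiter = registry.pop(instance_uuid, event_name)
        if waiter is None:
            # Matches the WARNING in the log: the event raced ahead of the
            # spawn path, so there is nothing to notify yet.
            print('Received unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        else:
            waiter.set()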
[ 952.143338] env[69994]: DEBUG nova.policy [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd74f1f7950a94d799185d2322a6c4a38', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7799f51750bb4c2589042a3b7bc8af01', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.195470] env[69994]: DEBUG nova.network.neutron [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Successfully updated port: acadedcd-b660-437d-be25-176371cec4b3 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.332264] env[69994]: INFO nova.compute.manager [-] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Took 2.07 seconds to deallocate network for instance. [ 952.438787] env[69994]: DEBUG oslo_vmware.api [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242149, 'name': PowerOnVM_Task, 'duration_secs': 0.470631} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.439191] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.439433] env[69994]: INFO nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Took 7.49 seconds to spawn the instance on the hypervisor. [ 952.439702] env[69994]: DEBUG nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.440546] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9d2975-c084-4da7-a524-bb76e622f9f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.467725] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242150, 'name': RemoveSnapshot_Task, 'duration_secs': 0.391098} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.467725] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 952.467725] env[69994]: DEBUG nova.compute.manager [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.468160] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91f9fb4-9cbf-4e68-99ab-51c326f73129 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.567515] env[69994]: DEBUG nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 952.698817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-4b3addd0-22b0-4793-af75-dba381c4a83f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.698992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-4b3addd0-22b0-4793-af75-dba381c4a83f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.699164] env[69994]: DEBUG nova.network.neutron [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.817147] env[69994]: DEBUG nova.network.neutron [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Successfully created port: 678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.898661] env[69994]: INFO nova.compute.manager [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Took 0.57 seconds to detach 1 volumes for instance. 
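Editor's note: the "Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7" entries carry the full Placement inventory for this compute node. As a quick sanity check on those numbers, the schedulable capacity of each resource class is (total - reserved) * allocation_ratio; the short sketch below computes that from the logged payload. The dict literal is taken from the log but trimmed to the fields the formula uses, and the helper itself is only illustrative.

    # Inventory as logged for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7,
    # trimmed to the fields relevant to the capacity calculation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        """Placement may allocate (total - reserved) * allocation_ratio
        of each resource class."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}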
[ 952.902639] env[69994]: DEBUG nova.compute.manager [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Deleting volume: b7a10ba0-24cb-4e9b-a0d8-098524f6ac67 {{(pid=69994) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 952.904945] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fb18ce-0d55-4cd1-952a-af7df2b96081 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.915227] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a0d427-5300-44a5-885a-64bf71a387df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.967681] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a3f141-579e-4f41-922a-82f2bd918993 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.970293] env[69994]: INFO nova.compute.manager [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Took 28.38 seconds to build instance. [ 952.979526] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcd1566-99a1-4e8a-8fde-dcd6c7f60c90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.001633] env[69994]: DEBUG nova.compute.provider_tree [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.004343] env[69994]: INFO nova.compute.manager [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Shelve offloading [ 953.322396] env[69994]: DEBUG nova.network.neutron [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.474466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.478234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-697aecef-a273-4ded-97e3-9ead54138a24 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.897s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.507463] env[69994]: DEBUG nova.scheduler.client.report [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.511220] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.514371] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75b5fec2-6b6d-416a-b9ad-7869ed8840fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.522701] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 953.522701] env[69994]: value = "task-3242152" [ 953.522701] env[69994]: _type = "Task" [ 953.522701] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.537936] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 953.537936] env[69994]: DEBUG nova.compute.manager [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.538206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbce845a-8a1b-44f4-810f-461d107f0f06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.549473] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.549606] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.549764] env[69994]: DEBUG nova.network.neutron [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.564515] env[69994]: DEBUG nova.network.neutron [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Updating instance_info_cache with network_info: [{"id": "acadedcd-b660-437d-be25-176371cec4b3", "address": "fa:16:3e:b1:8a:89", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacadedcd-b6", "ovs_interfaceid": "acadedcd-b660-437d-be25-176371cec4b3", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.586583] env[69994]: DEBUG nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 953.623322] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 953.623594] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 953.623784] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 953.624012] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 953.624257] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 953.624331] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 953.624534] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 953.624693] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 953.624856] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 953.625038] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 953.625866] env[69994]: DEBUG nova.virt.hardware [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 953.626096] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9254f7ef-dad8-4f5e-bfdc-ee8093a50ca2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.635339] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb316928-52f1-48a8-9e70-af4d9ad20aad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.012929] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.013684] env[69994]: DEBUG nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 954.016947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.024s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.017399] env[69994]: DEBUG nova.objects.instance [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 954.066782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-4b3addd0-22b0-4793-af75-dba381c4a83f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.067200] env[69994]: DEBUG nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Instance network_info: |[{"id": "acadedcd-b660-437d-be25-176371cec4b3", "address": "fa:16:3e:b1:8a:89", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacadedcd-b6", "ovs_interfaceid": "acadedcd-b660-437d-be25-176371cec4b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 954.068637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:8a:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acadedcd-b660-437d-be25-176371cec4b3', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.078318] env[69994]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating folder: Project (377f65074c2442588aee091b5165e1cf). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.079379] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-678a6a6f-1ffd-40a1-98a3-ab306392200d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.096412] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created folder: Project (377f65074c2442588aee091b5165e1cf) in parent group-v647729. [ 954.096412] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating folder: Instances. Parent ref: group-v647943. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.096412] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c57f1d5-a4fa-4c31-87b3-0f2478784a41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.108144] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created folder: Instances in parent group-v647943. [ 954.108842] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.108842] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.108985] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2adc7a7e-b60b-474a-bc7d-844fc44281ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.131646] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.131646] env[69994]: value = "task-3242155" [ 954.131646] env[69994]: _type = "Task" [ 954.131646] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.143507] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242155, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.377761] env[69994]: DEBUG nova.network.neutron [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4706905-12", "ovs_interfaceid": "e4706905-12e3-43b1-a83a-409585a96042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.385824] env[69994]: DEBUG nova.compute.manager [req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Received event network-changed-acadedcd-b660-437d-be25-176371cec4b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.386096] env[69994]: DEBUG nova.compute.manager [req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Refreshing instance network info cache due to event network-changed-acadedcd-b660-437d-be25-176371cec4b3. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 954.386355] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] Acquiring lock "refresh_cache-4b3addd0-22b0-4793-af75-dba381c4a83f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.386503] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] Acquired lock "refresh_cache-4b3addd0-22b0-4793-af75-dba381c4a83f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.386663] env[69994]: DEBUG nova.network.neutron [req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Refreshing network info cache for port acadedcd-b660-437d-be25-176371cec4b3 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.524122] env[69994]: DEBUG nova.compute.utils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 954.527334] env[69994]: DEBUG nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 954.644784] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242155, 'name': CreateVM_Task, 'duration_secs': 0.403546} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.645182] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.645952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.649079] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.649079] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 954.649079] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8615466b-8677-45ac-9ba3-d327cacd25f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.652674] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 954.652674] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52275ce5-23c2-d646-d86b-fd788b49fe5b" [ 954.652674] env[69994]: _type = "Task" [ 954.652674] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.662749] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52275ce5-23c2-d646-d86b-fd788b49fe5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.696564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.696564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.696564] env[69994]: INFO nova.compute.manager [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Shelving [ 954.739695] env[69994]: DEBUG nova.network.neutron [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Successfully updated port: 678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.794667] env[69994]: DEBUG nova.compute.manager [req-b6938161-8ece-4b1b-8ac1-8184a576b7b8 req-f7eb2e5b-b5a1-4191-abb4-b357c617141f service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Received event network-vif-plugged-678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.794884] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6938161-8ece-4b1b-8ac1-8184a576b7b8 req-f7eb2e5b-b5a1-4191-abb4-b357c617141f service nova] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.795109] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6938161-8ece-4b1b-8ac1-8184a576b7b8 req-f7eb2e5b-b5a1-4191-abb4-b357c617141f service nova] Lock "29071eb9-6334-4c23-acb4-142c12aa448d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.795260] env[69994]: DEBUG oslo_concurrency.lockutils [req-b6938161-8ece-4b1b-8ac1-8184a576b7b8 req-f7eb2e5b-b5a1-4191-abb4-b357c617141f service nova] Lock "29071eb9-6334-4c23-acb4-142c12aa448d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.795421] env[69994]: DEBUG nova.compute.manager [req-b6938161-8ece-4b1b-8ac1-8184a576b7b8 req-f7eb2e5b-b5a1-4191-abb4-b357c617141f service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] No waiting events found dispatching network-vif-plugged-678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 954.795586] env[69994]: WARNING nova.compute.manager [req-b6938161-8ece-4b1b-8ac1-8184a576b7b8 req-f7eb2e5b-b5a1-4191-abb4-b357c617141f service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Received unexpected event network-vif-plugged-678dab49-879d-4408-9488-fef42c017965 for instance with vm_state building and task_state spawning. [ 954.889317] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.028241] env[69994]: DEBUG nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 955.033380] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1e3d04fd-3c0e-43f4-a3bd-692ae1cb9196 tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.034717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.887s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.034913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.039324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.819s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.039324] env[69994]: DEBUG nova.objects.instance [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lazy-loading 'resources' on Instance uuid 17389887-5463-44e1-b1c0-f123d8dedec7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.076319] env[69994]: INFO nova.scheduler.client.report [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted allocations for instance 45a8dced-6c49-441c-92e2-ee323ed8753c [ 955.137392] env[69994]: DEBUG nova.network.neutron 
[req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Updated VIF entry in instance network info cache for port acadedcd-b660-437d-be25-176371cec4b3. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.138078] env[69994]: DEBUG nova.network.neutron [req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Updating instance_info_cache with network_info: [{"id": "acadedcd-b660-437d-be25-176371cec4b3", "address": "fa:16:3e:b1:8a:89", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacadedcd-b6", "ovs_interfaceid": "acadedcd-b660-437d-be25-176371cec4b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.169050] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52275ce5-23c2-d646-d86b-fd788b49fe5b, 'name': SearchDatastore_Task, 'duration_secs': 0.018943} completed successfully. 
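The instance_info_cache update above stores a list of VIF dicts; the fields most often needed when reading these entries (port id, MAC, fixed IPs, segmentation id, tap device name) can be pulled straight out of one element. A small sketch using the values just logged for port acadedcd-b660-437d-be25-176371cec4b3:

```python
# One VIF entry, abbreviated from the cache update logged above.
vif = {
    "id": "acadedcd-b660-437d-be25-176371cec4b3",
    "address": "fa:16:3e:b1:8a:89",
    "devname": "tapacadedcd-b6",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.12"}]}]},
    "details": {"segmentation_id": 507},
}

# Collect every fixed IP across the subnets of this VIF.
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]

print(vif["id"], vif["address"], vif["devname"],
      fixed_ips, vif["details"]["segmentation_id"])
```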
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.169756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.170028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.170275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.170422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.170628] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.170911] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dad7734-0a36-44cc-82d6-09c734853a96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.183033] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.183243] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.184153] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-533f3225-7975-421f-800e-9dff0855dcf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.193651] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 955.193651] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52521882-749f-e9ef-012d-b01636a7237e" [ 955.193651] env[69994]: _type = "Task" [ 955.193651] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.211953] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52521882-749f-e9ef-012d-b01636a7237e, 'name': SearchDatastore_Task, 'duration_secs': 0.01094} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.213252] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60687eec-59ef-4365-9fc3-956b3dcd6a65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.219920] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 955.219920] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52395fec-a850-5202-d6b3-2e85a4170df4" [ 955.219920] env[69994]: _type = "Task" [ 955.219920] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.233129] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52395fec-a850-5202-d6b3-2e85a4170df4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.243264] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.243410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.243567] env[69994]: DEBUG nova.network.neutron [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.284385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.284862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.341680] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 955.342603] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ca1d6b-03a1-4931-aae0-bdbecb11a936 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.351274] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 955.351523] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14d6302e-be2f-47f4-894a-0f2975c99554 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.434264] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 
tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 955.434649] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 955.435912] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleting the datastore file [datastore1] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 955.436882] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f526b764-322f-4f5a-80ff-9ddd57e4766b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.445754] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 955.445754] env[69994]: value = "task-3242157" [ 955.445754] env[69994]: _type = "Task" [ 955.445754] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.455916] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242157, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.591172] env[69994]: DEBUG oslo_concurrency.lockutils [None req-61a49f7a-8365-4214-b4f6-59f126849215 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "45a8dced-6c49-441c-92e2-ee323ed8753c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.310s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.640848] env[69994]: DEBUG oslo_concurrency.lockutils [req-a68577c1-c02f-4243-ae8d-d76d9bd3a1e5 req-c523a870-9313-4e11-9402-868f78ff580b service nova] Releasing lock "refresh_cache-4b3addd0-22b0-4793-af75-dba381c4a83f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.706312] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.706312] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cebccf72-308d-4f7f-b292-f83fa474c8d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.714019] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 955.714019] env[69994]: value = "task-3242158" [ 955.714019] env[69994]: _type = "Task" [ 955.714019] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.735346] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242158, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.739261] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52395fec-a850-5202-d6b3-2e85a4170df4, 'name': SearchDatastore_Task, 'duration_secs': 0.015592} completed successfully. 
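The Acquiring/Acquired/Releasing lock lines that bracket the cache refreshes and the shelve/build operations above come from oslo.concurrency: nova serializes work on a resource by taking a named internal lock such as refresh_cache-<instance uuid> or the instance UUID itself. A minimal sketch of that pattern, with the lock name taken from the entries above and the body left as a placeholder:

```python
from oslo_concurrency import lockutils

instance_uuid = '4b3addd0-22b0-4793-af75-dba381c4a83f'

# Equivalent in spirit to the "Acquiring lock refresh_cache-<uuid>" /
# "Releasing lock" pairs in the log: only one holder mutates the
# network info cache for this instance at a time.
with lockutils.lock('refresh_cache-%s' % instance_uuid):
    # Placeholder for the cache refresh work done while the lock is held.
    pass
```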
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.742385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.742766] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.743253] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-292f9cb5-5dc6-41e0-ae4d-16367a144cd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.762219] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 955.762219] env[69994]: value = "task-3242159" [ 955.762219] env[69994]: _type = "Task" [ 955.762219] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.774099] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.789342] env[69994]: DEBUG nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 955.819701] env[69994]: DEBUG nova.network.neutron [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.908161] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1667afa-1ac2-4a0b-b454-ec4679ed0790 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.917966] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784e5ba5-8ea3-4175-8aed-09eff142f340 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.962823] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bec9e43-e78f-467a-b790-9dc220188f25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.977604] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566f3aec-94a5-43d9-805d-33a3a74f76db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.982830] env[69994]: DEBUG oslo_vmware.api [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242157, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29369} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.983268] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.983496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 955.983771] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 955.999405] env[69994]: DEBUG nova.compute.provider_tree [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.015486] env[69994]: INFO nova.scheduler.client.report [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleted allocations for instance 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e [ 956.041254] env[69994]: DEBUG nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Start spawning 
the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 956.077596] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.077899] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.078068] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.078253] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.078465] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.078659] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.078887] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 956.079060] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.079227] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 
tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.081374] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.081374] env[69994]: DEBUG nova.virt.hardware [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.081374] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c28093f-f851-4ca2-9024-5d034c5ad22e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.092635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff1ae5b7-3e81-48e0-bf6f-812cd53d048a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.110134] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.116899] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Creating folder: Project (044469e66e8d4462be528c3bd11bb654). Parent ref: group-v647729. 
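The nova.virt.hardware lines above walk from the flavor's single vCPU and the default 65536-socket/core/thread maxima to the single admissible topology VirtCPUTopology(cores=1,sockets=1,threads=1). The selection rule is simply that sockets × cores × threads must equal the vCPU count while each factor stays within its maximum; a stand-alone illustration of that rule (not nova's actual implementation):

```python
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) whose product equals the vCPU count."""
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found


# For the m1.nano flavor above (1 vCPU) the only result is (1, 1, 1),
# matching the "Possible topologies" line in the log.
print(possible_topologies(1))
```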
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.118127] env[69994]: DEBUG nova.network.neutron [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updating instance_info_cache with network_info: [{"id": "678dab49-879d-4408-9488-fef42c017965", "address": "fa:16:3e:2c:64:94", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap678dab49-87", "ovs_interfaceid": "678dab49-879d-4408-9488-fef42c017965", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.119126] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6548f8b-f7ce-4610-96ac-d2682631cc86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.138089] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Created folder: Project (044469e66e8d4462be528c3bd11bb654) in parent group-v647729. [ 956.138236] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Creating folder: Instances. Parent ref: group-v647946. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.141843] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b57ebf1c-3b20-4005-a73e-1959996d4dad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.153132] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Created folder: Instances in parent group-v647946. [ 956.153339] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 956.154951] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.154951] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-758bda9f-3e84-4d6e-8a80-37e7de4e5da0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.174661] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.174661] env[69994]: value = "task-3242162" [ 956.174661] env[69994]: _type = "Task" [ 956.174661] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.188518] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242162, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.227437] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242158, 'name': PowerOffVM_Task, 'duration_secs': 0.202713} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.227719] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.228593] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abf43fc-5560-4cd2-90de-770751c8ca9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.251072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb040621-eace-4f41-a08b-2755c8f25500 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.274786] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242159, 'name': CopyVirtualDisk_Task} progress is 77%. 
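The CopyVirtualDisk_Task still in flight above copies the cached image disk into the new instance's folder. The datastore path convention is visible in the log itself: the cache keeps [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk and each instance gets [datastore1] <instance-uuid>/<instance-uuid>.vmdk. A small helper that restates that convention with plain string formatting (not nova's ds_util code):

```python
def cached_image_path(datastore, image_id, cache_folder='devstack-image-cache_base'):
    """Path of the cached image vmdk, as seen in the copy's source above."""
    return '[%s] %s/%s/%s.vmdk' % (datastore, cache_folder, image_id, image_id)


def instance_disk_path(datastore, instance_uuid):
    """Path of the instance's root disk vmdk, as seen in the copy's destination."""
    return '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)


print(cached_image_path('datastore1', 'cc2e14cc-b12f-480a-a387-dd21e9efda8b'))
print(instance_disk_path('datastore1', '4b3addd0-22b0-4793-af75-dba381c4a83f'))
```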
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.317809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.375807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock "850930f9-d5fb-4546-9796-30e164a1cdd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.376258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.507026] env[69994]: DEBUG nova.scheduler.client.report [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.522225] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.583217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.583471] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.626100] 
env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.626431] env[69994]: DEBUG nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Instance network_info: |[{"id": "678dab49-879d-4408-9488-fef42c017965", "address": "fa:16:3e:2c:64:94", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap678dab49-87", "ovs_interfaceid": "678dab49-879d-4408-9488-fef42c017965", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 956.627045] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:64:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '721c6720-3ce0-450e-9951-a894f03acc27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '678dab49-879d-4408-9488-fef42c017965', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.635302] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
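The provider inventory reported a little above (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) bounds what the scheduler can place on this host. Placement derives usable capacity per resource class as (total - reserved) × allocation_ratio; restated with the logged numbers:

```python
# Inventory as logged for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7,
# trimmed to the fields needed for the capacity calculation.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```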
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 956.635504] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.635725] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-756d697c-c520-4ac9-ab83-5d01cfb24d2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.656276] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.656276] env[69994]: value = "task-3242163" [ 956.656276] env[69994]: _type = "Task" [ 956.656276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.664994] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242163, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.683948] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242162, 'name': CreateVM_Task, 'duration_secs': 0.358654} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.684123] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.684543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.684702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.685035] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 956.685287] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e31b7b7-5470-4f2c-b95b-eab784d3f8a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.690917] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 956.690917] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf92a5-62b2-50b3-2847-b56a1d58db6c" [ 956.690917] env[69994]: _type = "Task" [ 956.690917] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.699806] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cf92a5-62b2-50b3-2847-b56a1d58db6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.769138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 956.769447] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-edec8da1-8ff4-422e-8d2c-803cd11c8eb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.778660] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550906} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.780139] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.780415] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.780818] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 956.780818] env[69994]: value = "task-3242164" [ 956.780818] env[69994]: _type = "Task" [ 956.780818] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.781030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a5bb4fe-9a2f-4c49-8f0b-85ee30a0a49b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.791566] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242164, 'name': CreateSnapshot_Task} progress is 0%. 
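The CreateSnapshot_Task above is the first half of shelving instance e638fe4f-5f75-4d38-8a58-15dd66fd9e27: after the VM is powered off, the driver snapshots it so its disk can be uploaded as a shelved image. Sketched with the same oslo.vmware call/poll pattern as earlier; the session, VM reference and the name/description strings are placeholders rather than nova's exact values:

```python
def snapshot_for_shelve(session, vm_ref, instance_uuid):
    """Create a VM snapshot and wait for vCenter to finish it."""
    # CreateSnapshot_Task takes a snapshot name, a description, and flags for
    # whether to capture memory state and quiesce the guest filesystem.
    task_ref = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                                  name='%s-snapshot' % instance_uuid,
                                  description='snapshot taken for shelve',
                                  memory=False, quiesce=True)
    return session.wait_for_task(task_ref)
```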
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.792842] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 956.792842] env[69994]: value = "task-3242165" [ 956.792842] env[69994]: _type = "Task" [ 956.792842] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.801469] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242165, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.831656] env[69994]: DEBUG nova.compute.manager [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Received event network-changed-678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.831880] env[69994]: DEBUG nova.compute.manager [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Refreshing instance network info cache due to event network-changed-678dab49-879d-4408-9488-fef42c017965. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 956.832158] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Acquiring lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.832360] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Acquired lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.832559] env[69994]: DEBUG nova.network.neutron [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Refreshing network info cache for port 678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.880645] env[69994]: DEBUG nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 957.014891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.976s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.017893] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.563s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.017893] env[69994]: DEBUG nova.objects.instance [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lazy-loading 'resources' on Instance uuid 566522b0-7aa7-4552-9be7-035d742ba394 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.036282] env[69994]: INFO nova.scheduler.client.report [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Deleted allocations for instance 17389887-5463-44e1-b1c0-f123d8dedec7 [ 957.085915] env[69994]: DEBUG nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 957.165878] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242163, 'name': CreateVM_Task, 'duration_secs': 0.379755} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.166103] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.166772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.201087] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cf92a5-62b2-50b3-2847-b56a1d58db6c, 'name': SearchDatastore_Task, 'duration_secs': 0.014169} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.201400] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.201634] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.201874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.202053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.202282] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.202570] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.202871] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 957.203108] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9e3f387-7453-494c-a756-cb54c57e769b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.204974] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eafa354-d300-43e9-bacd-b66f91776835 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.211164] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 
tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 957.211164] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dddb0f-2662-4da4-c6a3-50a5fcd7c955" [ 957.211164] env[69994]: _type = "Task" [ 957.211164] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.215321] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.215497] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.216548] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba4c44e-308c-4ad2-862e-4ab8b37fdc1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.222290] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dddb0f-2662-4da4-c6a3-50a5fcd7c955, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.225442] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 957.225442] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d26cee-21d1-af0e-713a-817f2d45ad1d" [ 957.225442] env[69994]: _type = "Task" [ 957.225442] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.233683] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d26cee-21d1-af0e-713a-817f2d45ad1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.293069] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242164, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.306516] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071908} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.306833] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.307801] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be81916a-22f3-40f5-9379-73307147ddbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.337733] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.340101] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-186183d6-f875-48e1-b555-6b263afb33ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.362948] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 957.362948] env[69994]: value = "task-3242166" [ 957.362948] env[69994]: _type = "Task" [ 957.362948] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.377566] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242166, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.404608] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.545963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b131ea34-06f7-4395-9c79-ed0c6557c94d tempest-ServersListShow298Test-1168383887 tempest-ServersListShow298Test-1168383887-project-member] Lock "17389887-5463-44e1-b1c0-f123d8dedec7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.634s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.605220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.661341] env[69994]: DEBUG nova.network.neutron [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updated VIF entry in instance network info cache for port 678dab49-879d-4408-9488-fef42c017965. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.661713] env[69994]: DEBUG nova.network.neutron [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updating instance_info_cache with network_info: [{"id": "678dab49-879d-4408-9488-fef42c017965", "address": "fa:16:3e:2c:64:94", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap678dab49-87", "ovs_interfaceid": "678dab49-879d-4408-9488-fef42c017965", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.721997] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': 
session[520ca315-cd17-8670-37df-715bbcc23663]52dddb0f-2662-4da4-c6a3-50a5fcd7c955, 'name': SearchDatastore_Task, 'duration_secs': 0.014788} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.724423] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.724647] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.724916] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.735072] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d26cee-21d1-af0e-713a-817f2d45ad1d, 'name': SearchDatastore_Task, 'duration_secs': 0.010725} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.738585] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-801c4e15-11fd-4151-8cf1-4dbe70f86a0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.744543] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 957.744543] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525f2cac-6859-9e8f-a086-1adac56c9203" [ 957.744543] env[69994]: _type = "Task" [ 957.744543] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.760407] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525f2cac-6859-9e8f-a086-1adac56c9203, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.795796] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242164, 'name': CreateSnapshot_Task, 'duration_secs': 0.528815} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.798452] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 957.799816] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c53184-6fd6-464c-b1ec-a3cb7e8ad421 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.873098] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242166, 'name': ReconfigVM_Task, 'duration_secs': 0.274919} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.874235] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.875340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e544eaa-4401-41fc-a718-26366ecb18eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.877833] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a36b24d1-4526-4c0e-a3ad-7c4c034a4bed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.888252] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75df0e4d-7edd-4c55-bdf5-0e4b75fea1c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.892037] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 957.892037] env[69994]: value = "task-3242167" [ 957.892037] env[69994]: _type = "Task" [ 957.892037] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.918082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.919974] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e649c15e-45b5-4a77-96af-7d16f117d9e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.925906] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242167, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.931356] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f9369a-f91e-46de-a624-adc85eef0f5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.945637] env[69994]: DEBUG nova.compute.provider_tree [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.164922] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Releasing lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.165569] env[69994]: DEBUG nova.compute.manager [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received event network-vif-unplugged-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.165901] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.166144] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.166317] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.166489] env[69994]: DEBUG nova.compute.manager [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] No waiting events found dispatching network-vif-unplugged-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.166665] env[69994]: WARNING nova.compute.manager [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received unexpected event network-vif-unplugged-e4706905-12e3-43b1-a83a-409585a96042 for instance with vm_state shelved_offloaded and task_state unshelving. [ 958.166837] env[69994]: DEBUG nova.compute.manager [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received event network-changed-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.167061] env[69994]: DEBUG nova.compute.manager [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Refreshing instance network info cache due to event network-changed-e4706905-12e3-43b1-a83a-409585a96042. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 958.167225] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Acquiring lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.167362] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Acquired lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.167535] env[69994]: DEBUG nova.network.neutron [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Refreshing network info cache for port e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.255397] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525f2cac-6859-9e8f-a086-1adac56c9203, 'name': SearchDatastore_Task, 'duration_secs': 0.028917} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.255651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.255900] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.256185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.256362] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.256572] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3dbd65b5-380c-45a6-83c2-1fab76f8f177 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.258455] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebcfa2a4-4e38-4929-bd18-f5c3d3c3d0fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.267049] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 958.267049] env[69994]: value = "task-3242168" [ 958.267049] env[69994]: _type = "Task" [ 958.267049] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.268175] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.268350] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.271721] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17db5002-d7c7-4070-a10d-3df03be7b6f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.277140] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 958.277140] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52697a8e-ca3d-9e78-64a7-160c9136b698" [ 958.277140] env[69994]: _type = "Task" [ 958.277140] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.281153] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.293650] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52697a8e-ca3d-9e78-64a7-160c9136b698, 'name': SearchDatastore_Task, 'duration_secs': 0.010512} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.297612] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8ded99a-90d5-4b95-86ca-1b9bd5569c0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.303055] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 958.303055] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52316d20-28cf-fe3c-8cd4-a7b2642d323e" [ 958.303055] env[69994]: _type = "Task" [ 958.303055] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.318961] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 958.319279] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52316d20-28cf-fe3c-8cd4-a7b2642d323e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.319502] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-412f0452-a2f6-4748-bbf3-b55a1bd1e498 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.327738] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 958.327738] env[69994]: value = "task-3242169" [ 958.327738] env[69994]: _type = "Task" [ 958.327738] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.336319] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242169, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.401058] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242167, 'name': Rename_Task, 'duration_secs': 0.144857} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.401384] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 958.401661] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b37c19c8-e985-4aa7-81df-bb74ac7abb46 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.408664] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 958.408664] env[69994]: value = "task-3242170" [ 958.408664] env[69994]: _type = "Task" [ 958.408664] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.417339] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242170, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.449232] env[69994]: DEBUG nova.scheduler.client.report [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.778778] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242168, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.815469] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52316d20-28cf-fe3c-8cd4-a7b2642d323e, 'name': SearchDatastore_Task, 'duration_secs': 0.010103} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.815732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.815990] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 29071eb9-6334-4c23-acb4-142c12aa448d/29071eb9-6334-4c23-acb4-142c12aa448d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.816267] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa93b432-6a78-4a5c-ae0a-6ce969bc7ec2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.823417] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 958.823417] env[69994]: value = "task-3242171" [ 958.823417] env[69994]: _type = "Task" [ 958.823417] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.834373] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242171, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.840165] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242169, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.879850] env[69994]: DEBUG nova.network.neutron [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updated VIF entry in instance network info cache for port e4706905-12e3-43b1-a83a-409585a96042. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.880222] env[69994]: DEBUG nova.network.neutron [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape4706905-12", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.919315] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242170, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.955836] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.938s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.958560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.228s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.958872] env[69994]: DEBUG nova.objects.instance [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lazy-loading 'resources' on Instance uuid a4544bc9-6935-4825-9b45-2054d2ced330 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.978082] env[69994]: INFO nova.scheduler.client.report [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted allocations for instance 566522b0-7aa7-4552-9be7-035d742ba394 [ 959.280175] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52108} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.280175] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.280611] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.280611] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cab53fd7-e69c-4fc7-84b7-eb93dc50d559 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.289660] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 959.289660] env[69994]: value = "task-3242172" [ 959.289660] env[69994]: _type = "Task" [ 959.289660] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.301160] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242172, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.336694] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463354} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.336694] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 29071eb9-6334-4c23-acb4-142c12aa448d/29071eb9-6334-4c23-acb4-142c12aa448d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.336957] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.337393] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b11fb34-834b-4fd9-aba7-67544e5131a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.345148] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242169, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.353052] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 959.353052] env[69994]: value = "task-3242173" [ 959.353052] env[69994]: _type = "Task" [ 959.353052] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.363067] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242173, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.382984] env[69994]: DEBUG oslo_concurrency.lockutils [req-7077a71f-1f0f-4901-b099-0d37facffe13 req-cae33b02-cf69-4ffd-97a8-2f2dae611df6 service nova] Releasing lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.422942] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242170, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.491174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fb176ce9-e6b0-4b69-85eb-b9bc37a169fa tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "566522b0-7aa7-4552-9be7-035d742ba394" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.507s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.714104] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f2a7f0-63b8-4713-a9d1-9131c8416f2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.722329] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4767d9d4-6795-4054-a62c-c780021de02a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.752524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b80536-c8bf-4f37-a744-181049005df6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.764565] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5531f4d4-3f00-48e1-abe4-07f9c158d384 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.785638] env[69994]: DEBUG nova.compute.provider_tree [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.799934] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087184} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.800292] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.800947] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a1a46c-3b0d-4546-9518-8df75b67372b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.823213] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.823213] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2fa5815-55e7-4606-8b9c-66ed16ae4020 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.846868] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242169, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.848212] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 959.848212] env[69994]: value = "task-3242174" [ 959.848212] env[69994]: _type = "Task" [ 959.848212] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.856452] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242174, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.864728] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242173, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085661} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.865090] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.865952] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c87436-133a-4e4c-8f85-c7eb207fbc1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.887990] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 29071eb9-6334-4c23-acb4-142c12aa448d/29071eb9-6334-4c23-acb4-142c12aa448d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.888300] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-220e4ea7-0c40-49cf-8575-a525d35f606d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.908744] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 959.908744] env[69994]: value = "task-3242175" [ 959.908744] env[69994]: _type = "Task" [ 959.908744] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.921059] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242175, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.923958] env[69994]: DEBUG oslo_vmware.api [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242170, 'name': PowerOnVM_Task, 'duration_secs': 1.016646} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.924208] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.924409] env[69994]: INFO nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Took 8.80 seconds to spawn the instance on the hypervisor. 
[ 959.924587] env[69994]: DEBUG nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 959.925405] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345e55b0-b7ab-459a-be58-257755663343 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.934539] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.934776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.934975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.935177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.935342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.937338] env[69994]: INFO nova.compute.manager [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Terminating instance [ 960.289742] env[69994]: DEBUG nova.scheduler.client.report [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.347330] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242169, 'name': CloneVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.356813] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242174, 'name': ReconfigVM_Task, 'duration_secs': 0.269525} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.357094] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Reconfigured VM instance instance-0000004c to attach disk [datastore1] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.357737] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fcea45f0-4d21-4cee-b0d3-16b4beda3940 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.367417] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 960.367417] env[69994]: value = "task-3242176" [ 960.367417] env[69994]: _type = "Task" [ 960.367417] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.376470] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242176, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.418992] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242175, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.442072] env[69994]: DEBUG nova.compute.manager [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 960.442354] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.443313] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3f60c1-24d7-43bd-8a9e-148c89cd8bf5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.450996] env[69994]: INFO nova.compute.manager [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Took 25.76 seconds to build instance. [ 960.459089] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.459089] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98bb3510-9124-4e18-bf4e-e40cc24e215f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.466317] env[69994]: DEBUG oslo_vmware.api [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 960.466317] env[69994]: value = "task-3242177" [ 960.466317] env[69994]: _type = "Task" [ 960.466317] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.476792] env[69994]: DEBUG oslo_vmware.api [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242177, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.795670] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.837s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.797947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.474s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.800763] env[69994]: DEBUG nova.objects.instance [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lazy-loading 'resources' on Instance uuid dbad6bed-64ba-4dfd-abad-c0b2c775ba2c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.820633] env[69994]: INFO nova.scheduler.client.report [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Deleted allocations for instance a4544bc9-6935-4825-9b45-2054d2ced330 [ 960.848495] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242169, 'name': CloneVM_Task, 'duration_secs': 2.046512} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.848765] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Created linked-clone VM from snapshot [ 960.849595] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6adcff-8296-411d-a46e-5185b1b2f9dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.858433] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Uploading image 0c454b0e-ff19-4782-b301-ec0768462b7b {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 960.876437] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242176, 'name': Rename_Task, 'duration_secs': 0.146612} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.876732] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.877012] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00631fd4-81d3-445c-a625-0be36f555aab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.884753] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 960.884753] env[69994]: value = "task-3242178" [ 960.884753] env[69994]: _type = "Task" [ 960.884753] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.889631] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 960.889631] env[69994]: value = "vm-647951" [ 960.889631] env[69994]: _type = "VirtualMachine" [ 960.889631] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 960.889865] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-295ad134-1d7b-4792-a80c-41f1d9456e97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.896366] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242178, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.897639] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lease: (returnval){ [ 960.897639] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bd4b9d-9b6f-470d-c493-eea1cf2c86de" [ 960.897639] env[69994]: _type = "HttpNfcLease" [ 960.897639] env[69994]: } obtained for exporting VM: (result){ [ 960.897639] env[69994]: value = "vm-647951" [ 960.897639] env[69994]: _type = "VirtualMachine" [ 960.897639] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 960.897925] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the lease: (returnval){ [ 960.897925] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bd4b9d-9b6f-470d-c493-eea1cf2c86de" [ 960.897925] env[69994]: _type = "HttpNfcLease" [ 960.897925] env[69994]: } to be ready. 
{{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 960.904841] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 960.904841] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bd4b9d-9b6f-470d-c493-eea1cf2c86de" [ 960.904841] env[69994]: _type = "HttpNfcLease" [ 960.904841] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 960.920295] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242175, 'name': ReconfigVM_Task, 'duration_secs': 0.738402} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.920572] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 29071eb9-6334-4c23-acb4-142c12aa448d/29071eb9-6334-4c23-acb4-142c12aa448d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.924350] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3df0db11-1cc6-4eeb-88c1-daa9a4bee5ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.932453] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 960.932453] env[69994]: value = "task-3242180" [ 960.932453] env[69994]: _type = "Task" [ 960.932453] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.942633] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242180, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.955326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb579e59-d630-4236-9059-a39bf8f37be0 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.277s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.978202] env[69994]: DEBUG oslo_vmware.api [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242177, 'name': PowerOffVM_Task, 'duration_secs': 0.229575} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.978482] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.978651] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.978910] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0baed36-8f40-4376-9446-c5c34a890b67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.053721] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.053832] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.054020] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleting the datastore file [datastore1] e8caf244-413b-49bb-bdff-79aca0ccbc2b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.054368] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9856de43-da0b-4ba8-bebb-a8da2e632c44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.063011] env[69994]: DEBUG oslo_vmware.api [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 961.063011] env[69994]: value = "task-3242182" [ 961.063011] env[69994]: _type = "Task" [ 961.063011] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.073488] env[69994]: DEBUG oslo_vmware.api [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242182, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.326663] env[69994]: DEBUG oslo_concurrency.lockutils [None req-edff0ddf-e5ac-4235-9a8c-c51e723671e8 tempest-ServersWithSpecificFlavorTestJSON-1802466197 tempest-ServersWithSpecificFlavorTestJSON-1802466197-project-member] Lock "a4544bc9-6935-4825-9b45-2054d2ced330" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.230s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.397611] env[69994]: DEBUG oslo_vmware.api [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242178, 'name': PowerOnVM_Task, 'duration_secs': 0.46926} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.397813] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.398027] env[69994]: INFO nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Took 5.36 seconds to spawn the instance on the hypervisor. [ 961.398234] env[69994]: DEBUG nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.399117] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104f6729-4a74-4402-a361-78748a430245 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.410908] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 961.410908] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bd4b9d-9b6f-470d-c493-eea1cf2c86de" [ 961.410908] env[69994]: _type = "HttpNfcLease" [ 961.410908] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 961.412405] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 961.412405] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bd4b9d-9b6f-470d-c493-eea1cf2c86de" [ 961.412405] env[69994]: _type = "HttpNfcLease" [ 961.412405] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 961.415789] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949b5940-ada7-472c-a6ac-d5c5df0a4d58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.426586] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52454f7c-4589-4469-55b8-eced386046ae/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 961.426780] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52454f7c-4589-4469-55b8-eced386046ae/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 961.501778] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242180, 'name': Rename_Task, 'duration_secs': 0.204258} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.502090] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.502338] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3cf70f89-2a61-485e-9fe6-d58d838aa442 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.511507] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 961.511507] env[69994]: value = "task-3242183" [ 961.511507] env[69994]: _type = "Task" [ 961.511507] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.521173] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242183, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.546974] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b5258542-dbf4-464d-9d0e-9e4b2e0fd1c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.573970] env[69994]: DEBUG oslo_vmware.api [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177867} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.576505] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.576698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.576941] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.577146] env[69994]: INFO nova.compute.manager [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 961.577390] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.582762] env[69994]: DEBUG nova.compute.manager [-] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.582873] env[69994]: DEBUG nova.network.neutron [-] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.670191] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8355c9f1-b52a-4c7d-9c43-8799a862c03c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.678547] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e5a91f-b6d6-4a41-89a9-22df28d5ccab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.709431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14a764e-4bb0-4681-8bdc-96041bf05964 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.719153] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b710a2cf-a1f4-4760-bb5a-6145fdf00e33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.735320] env[69994]: INFO nova.compute.manager [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Rebuilding instance [ 961.737427] env[69994]: DEBUG nova.compute.provider_tree [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.782443] env[69994]: DEBUG nova.compute.manager [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.784369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2a50b7-9281-4ae5-be33-e958e50ff464 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.930529] env[69994]: INFO nova.compute.manager [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Took 25.59 seconds to build instance. 
[ 961.974501] env[69994]: DEBUG nova.compute.manager [req-1d1cc587-7a4d-4430-a1eb-2e127290bfc7 req-2a26a3e9-8cb4-4ccd-a206-061da98dd0e8 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Received event network-vif-deleted-601a8fe0-5f6c-4f29-860b-193a32a1e99c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.974851] env[69994]: INFO nova.compute.manager [req-1d1cc587-7a4d-4430-a1eb-2e127290bfc7 req-2a26a3e9-8cb4-4ccd-a206-061da98dd0e8 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Neutron deleted interface 601a8fe0-5f6c-4f29-860b-193a32a1e99c; detaching it from the instance and deleting it from the info cache [ 961.975111] env[69994]: DEBUG nova.network.neutron [req-1d1cc587-7a4d-4430-a1eb-2e127290bfc7 req-2a26a3e9-8cb4-4ccd-a206-061da98dd0e8 service nova] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.024654] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242183, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.242035] env[69994]: DEBUG nova.scheduler.client.report [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.435083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fbfdd0a9-8b61-41eb-b20d-42159908d478 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "ee7e0c02-ef19-4475-a936-f591c8185797" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.100s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.450817] env[69994]: DEBUG nova.network.neutron [-] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.478373] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81ee6f46-60fe-4cb8-92ea-a48975813e84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.490758] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fb2dd8-e30c-419e-bdda-8f22df5f4595 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.530014] env[69994]: DEBUG nova.compute.manager [req-1d1cc587-7a4d-4430-a1eb-2e127290bfc7 req-2a26a3e9-8cb4-4ccd-a206-061da98dd0e8 service nova] [instance: 
e8caf244-413b-49bb-bdff-79aca0ccbc2b] Detach interface failed, port_id=601a8fe0-5f6c-4f29-860b-193a32a1e99c, reason: Instance e8caf244-413b-49bb-bdff-79aca0ccbc2b could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 962.536972] env[69994]: DEBUG oslo_vmware.api [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242183, 'name': PowerOnVM_Task, 'duration_secs': 0.678899} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.537265] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.537488] env[69994]: INFO nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Took 8.95 seconds to spawn the instance on the hypervisor. [ 962.537678] env[69994]: DEBUG nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.538518] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e6b3ac-ff34-4c2f-9c69-d9c43040735e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.747251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.949s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.750887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.276s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.752662] env[69994]: DEBUG nova.objects.instance [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lazy-loading 'resources' on Instance uuid 25a64898-568e-4095-aace-f8a564cdf916 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.776589] env[69994]: INFO nova.scheduler.client.report [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Deleted allocations for instance dbad6bed-64ba-4dfd-abad-c0b2c775ba2c [ 962.804474] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea 
tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.804474] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-560e6fe5-8076-4e72-981e-330d84cce5da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.811231] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 962.811231] env[69994]: value = "task-3242184" [ 962.811231] env[69994]: _type = "Task" [ 962.811231] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.823678] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242184, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.901692] env[69994]: INFO nova.compute.manager [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Rebuilding instance [ 962.950832] env[69994]: DEBUG nova.compute.manager [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.951738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b7af2a-108d-4941-a842-91e41c67da84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.954538] env[69994]: INFO nova.compute.manager [-] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Took 1.37 seconds to deallocate network for instance. [ 963.063543] env[69994]: INFO nova.compute.manager [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Took 28.02 seconds to build instance. [ 963.285904] env[69994]: DEBUG oslo_concurrency.lockutils [None req-abd1bb36-78da-4bc9-bb56-673c0b9ea9e4 tempest-MigrationsAdminTest-715509558 tempest-MigrationsAdminTest-715509558-project-member] Lock "dbad6bed-64ba-4dfd-abad-c0b2c775ba2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.397s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.330056] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242184, 'name': PowerOffVM_Task, 'duration_secs': 0.402888} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.330056] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.330056] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.330056] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e228ba1-aa8b-488a-a51d-ada30833eae6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.346155] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.346462] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-987a9849-6a7d-4dd6-b4ed-b9ed654b7b7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.447846] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.448215] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.448215] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleting the datastore file [datastore1] 4b3addd0-22b0-4793-af75-dba381c4a83f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.448458] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66192544-e082-475a-a5d7-ccd7f7921dab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.462322] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 963.462322] env[69994]: value = "task-3242186" [ 963.462322] env[69994]: _type = "Task" [ 963.462322] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.466971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.479634] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.566407] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d911758-80f9-4ae1-9a99-75828c20cec5 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.528s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.567332] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cee9a2-8254-4aa1-a39e-057867c1b2b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.579500] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab455404-a500-4b34-8c47-5765a161110f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.617351] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b4cbdf-7989-4254-90ba-88144f9e6847 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.627679] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d807846d-2f91-4ca2-9d68-0aa5e9b975e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.643282] env[69994]: DEBUG nova.compute.provider_tree [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.979600] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.979968] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.376084} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.980210] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdf53dfd-5ef2-41aa-aacd-3ddf2dc377a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.985855] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.986926] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.987261] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.004195] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 964.004195] env[69994]: value = "task-3242187" [ 964.004195] env[69994]: _type = "Task" [ 964.004195] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.017328] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242187, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.146804] env[69994]: DEBUG nova.scheduler.client.report [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.224225] env[69994]: DEBUG nova.compute.manager [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Received event network-changed-678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 964.224449] env[69994]: DEBUG nova.compute.manager [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Refreshing instance network info cache due to event network-changed-678dab49-879d-4408-9488-fef42c017965. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 964.224677] env[69994]: DEBUG oslo_concurrency.lockutils [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] Acquiring lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.224912] env[69994]: DEBUG oslo_concurrency.lockutils [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] Acquired lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.224977] env[69994]: DEBUG nova.network.neutron [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Refreshing network info cache for port 678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 964.525578] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242187, 'name': PowerOffVM_Task, 'duration_secs': 0.127989} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.525578] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 964.525578] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.525578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99a2d3c-b50b-4599-a399-5c4587923286 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.539131] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 964.539131] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71f498c9-0a5e-47f1-af5a-ecd7c9e4ce3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.578470] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 964.578470] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 964.578470] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Deleting the datastore file [datastore1] ee7e0c02-ef19-4475-a936-f591c8185797 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.578470] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a68a5d0-9d5c-4e36-8b84-d7596c886f82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.585705] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 964.585705] env[69994]: value = "task-3242189" [ 964.585705] env[69994]: _type = "Task" [ 964.585705] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.595929] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242189, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.652270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.654745] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.339s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.656318] env[69994]: INFO nova.compute.claims [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.677373] env[69994]: INFO nova.scheduler.client.report [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Deleted allocations for instance 25a64898-568e-4095-aace-f8a564cdf916 [ 965.035524] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 965.035762] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.035922] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.036115] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.036265] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.036455] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 965.036730] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 965.036971] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 965.037165] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 965.037456] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 965.037880] env[69994]: DEBUG nova.virt.hardware [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 965.039322] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2ee953-6cd0-4649-95d4-cee7859b02d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.049475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bcd1d4-c401-45cc-b248-ed04a3b866e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.066394] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:8a:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acadedcd-b660-437d-be25-176371cec4b3', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.074132] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.074435] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.074731] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d27b1004-76c3-4c68-b6c5-98ce8b2ef9c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.107930] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20066} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.109441] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.109686] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.109850] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.112354] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.112354] env[69994]: value = "task-3242190" [ 965.112354] env[69994]: _type = "Task" [ 965.112354] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.123071] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242190, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.186270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0b695f0e-9064-471b-896e-3bd28329e637 tempest-ServersTestBootFromVolume-1768305709 tempest-ServersTestBootFromVolume-1768305709-project-member] Lock "25a64898-568e-4095-aace-f8a564cdf916" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.747s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.193349] env[69994]: DEBUG nova.network.neutron [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updated VIF entry in instance network info cache for port 678dab49-879d-4408-9488-fef42c017965. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 965.193613] env[69994]: DEBUG nova.network.neutron [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updating instance_info_cache with network_info: [{"id": "678dab49-879d-4408-9488-fef42c017965", "address": "fa:16:3e:2c:64:94", "network": {"id": "6e027deb-e9dd-4d16-a0cf-5c75f1d53722", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1614407267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7799f51750bb4c2589042a3b7bc8af01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "721c6720-3ce0-450e-9951-a894f03acc27", "external-id": "nsx-vlan-transportzone-394", "segmentation_id": 394, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap678dab49-87", "ovs_interfaceid": "678dab49-879d-4408-9488-fef42c017965", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.629862] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242190, 'name': CreateVM_Task, 'duration_secs': 0.326028} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.630316] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.630797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.630969] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.631327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.631592] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b93c03ee-c668-41f0-935e-3958aedd2212 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.637235] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 965.637235] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52efda00-99c9-7403-8656-ab0bcc6a6b7a" [ 965.637235] env[69994]: _type = "Task" [ 965.637235] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.647429] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52efda00-99c9-7403-8656-ab0bcc6a6b7a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.695931] env[69994]: DEBUG oslo_concurrency.lockutils [req-0f545df9-9c38-4e27-ac0d-ca300c1c0282 req-d9db4347-53ff-4d6d-afdd-51eb5eb8c187 service nova] Releasing lock "refresh_cache-29071eb9-6334-4c23-acb4-142c12aa448d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.915014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b093d52-daea-4a30-916e-9b16a69ea0ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.926635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35999be1-b8d4-46bd-9585-336289622bea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.960521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee39b38-b72e-40b0-97b2-1c3e08bc3ba4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.970596] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e21743-b65c-4e90-949e-d4f4d6ea6696 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.985171] env[69994]: DEBUG nova.compute.provider_tree [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.149644] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52efda00-99c9-7403-8656-ab0bcc6a6b7a, 'name': SearchDatastore_Task, 'duration_secs': 0.02257} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.150100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.150407] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.151283] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.151283] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.151283] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.151452] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5564c27b-41fd-4362-8971-0bfe93212bb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None 
req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 966.169019] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 966.169673] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 966.169794] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 966.170040] env[69994]: DEBUG nova.virt.hardware [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 966.170908] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e629e5-0a62-4a02-a325-d1c140998d1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.175535] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.175797] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.176946] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f29a23db-e3db-41ac-9d8e-bfde0d37ff0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.183186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff4a579-7020-4805-8ee1-21d6da5028f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.190438] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 966.190438] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52da2f0e-7c31-55cb-1fce-a2f7f215983e" [ 966.190438] env[69994]: _type = "Task" [ 966.190438] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.205364] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 966.211825] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.212829] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 966.213797] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b4ff9f9-2484-48ac-a7da-354bb2b778cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.232409] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52da2f0e-7c31-55cb-1fce-a2f7f215983e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.239722] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 966.239722] env[69994]: value = "task-3242191" [ 966.239722] env[69994]: _type = "Task" [ 966.239722] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.249570] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242191, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.491018] env[69994]: DEBUG nova.scheduler.client.report [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.704215] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52da2f0e-7c31-55cb-1fce-a2f7f215983e, 'name': SearchDatastore_Task, 'duration_secs': 0.031685} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.704215] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78a4a7f9-00ec-45f2-9f97-627c81ea3223 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.715016] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 966.715016] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5226d3fe-911e-c2f6-a20d-72e50fb7c1a2" [ 966.715016] env[69994]: _type = "Task" [ 966.715016] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.726267] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5226d3fe-911e-c2f6-a20d-72e50fb7c1a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.751401] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242191, 'name': CreateVM_Task, 'duration_secs': 0.322647} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.751666] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 966.752120] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.752282] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.752607] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 966.752875] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-514586ec-16b4-4b4d-ad41-21df2b03e898 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.758648] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 966.758648] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5216366c-f4e9-9098-5055-592750bacb1b" [ 966.758648] env[69994]: _type = "Task" [ 966.758648] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.767473] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5216366c-f4e9-9098-5055-592750bacb1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.998355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.999290] env[69994]: DEBUG nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 967.002279] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.480s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.003862] env[69994]: DEBUG nova.objects.instance [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lazy-loading 'resources' on Instance uuid 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.228055] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5226d3fe-911e-c2f6-a20d-72e50fb7c1a2, 'name': SearchDatastore_Task, 'duration_secs': 0.036289} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.228055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.228055] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.228055] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f7efd95-838e-48dc-97cf-f940477f107e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.237865] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 967.237865] env[69994]: value = "task-3242192" [ 967.237865] env[69994]: _type = "Task" [ 967.237865] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.249464] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242192, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.271637] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5216366c-f4e9-9098-5055-592750bacb1b, 'name': SearchDatastore_Task, 'duration_secs': 0.0377} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.272650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.275431] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 967.275684] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.275835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.276034] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 967.276323] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b84ed919-88fd-4e12-970d-8692594b2b7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.291261] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 967.291261] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 967.291261] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea358108-740f-4545-84d7-37f4f5990c47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.296733] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 967.296733] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521269b7-1ed1-f49e-683c-cebfb72254e5" [ 967.296733] env[69994]: _type = "Task" [ 967.296733] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.304891] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521269b7-1ed1-f49e-683c-cebfb72254e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.505152] env[69994]: DEBUG nova.compute.utils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 967.506842] env[69994]: DEBUG nova.objects.instance [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lazy-loading 'numa_topology' on Instance uuid 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.508445] env[69994]: DEBUG nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 967.508445] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 967.647623] env[69994]: DEBUG nova.policy [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b796608acf23444d909343ec20e84175', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '978da68b62d8409da5d8c8a45cd985c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 967.757089] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242192, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.810226] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521269b7-1ed1-f49e-683c-cebfb72254e5, 'name': SearchDatastore_Task, 'duration_secs': 0.016081} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.811321] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-996c4b5b-d4ab-48d2-aa78-4569f9311d37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.819042] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 967.819042] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52bccc78-7277-9c61-e77e-7212865fd926" [ 967.819042] env[69994]: _type = "Task" [ 967.819042] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.831825] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bccc78-7277-9c61-e77e-7212865fd926, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.009622] env[69994]: DEBUG nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 968.013197] env[69994]: DEBUG nova.objects.base [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Object Instance<6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e> lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 968.251390] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242192, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.324471] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Successfully created port: c14a1492-8af8-4c93-bf0d-f2424cd1f335 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.333920] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52bccc78-7277-9c61-e77e-7212865fd926, 'name': SearchDatastore_Task, 'duration_secs': 0.058804} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.335185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.335442] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 968.336270] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d4d03b-c7f6-407d-93d2-fcde347a935e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.338901] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-224c346b-37fb-46c2-badd-1706e9345d68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.346735] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ce4ee0-fe12-4e14-800b-d86b0974b8b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.350425] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 
tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 968.350425] env[69994]: value = "task-3242193" [ 968.350425] env[69994]: _type = "Task" [ 968.350425] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.386927] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3843efd2-546a-4e3a-a45e-4bfbc8a4043e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.392878] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.398097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d2b662-5c72-4fa2-8427-dbacbad8b1a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.412309] env[69994]: DEBUG nova.compute.provider_tree [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.752633] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242192, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.848371] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Successfully created port: aa9589d7-176c-4249-9a3a-0af202829e70 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.863351] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242193, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.916226] env[69994]: DEBUG nova.scheduler.client.report [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.023520] env[69994]: DEBUG nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 969.054947] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.055836] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.056082] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.056342] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.056533] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.056721] env[69994]: DEBUG nova.virt.hardware [None 
req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.056985] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.057229] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.057437] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.057663] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.057951] env[69994]: DEBUG nova.virt.hardware [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.058969] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153f0a03-d7a7-4c41-9e54-16477d66aa4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.071995] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2805b450-2d8c-4354-9b8e-96a81170674d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.250149] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242192, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.770096} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.250421] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 969.250651] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 969.250926] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1d03eb1-c324-44af-8de0-5657bf636b56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.266181] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 969.266181] env[69994]: value = "task-3242194" [ 969.266181] env[69994]: _type = "Task" [ 969.266181] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.278388] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242194, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.284739] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Successfully created port: f249cccf-9606-4021-91bd-19028a6c4cbf {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 969.369875] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242193, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.424021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.421s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.429071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.023s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.431422] env[69994]: INFO nova.compute.claims [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.748087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.748087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.779378] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08759} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.779378] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.779378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2274292-5958-473f-aed9-30602a8b84ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.806216] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.806216] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f921484f-d4ad-4322-b402-ecdb93256d1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.829494] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 969.829494] env[69994]: value = "task-3242195" [ 969.829494] env[69994]: _type = "Task" [ 969.829494] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.841424] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242195, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.866858] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242193, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.323968} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.866858] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 969.866858] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 969.866858] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8932662-3299-4a0b-979b-79b58be44020 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.876513] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 969.876513] env[69994]: value = "task-3242196" [ 969.876513] env[69994]: _type = "Task" [ 969.876513] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.892898] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242196, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.947226] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2f7930fb-47db-469e-9ac8-5c62f940b8bf tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 32.503s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.949247] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 12.031s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.950064] env[69994]: INFO nova.compute.manager [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Unshelving [ 970.252189] env[69994]: DEBUG nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 970.344563] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242195, 'name': ReconfigVM_Task, 'duration_secs': 0.344625} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.345256] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 4b3addd0-22b0-4793-af75-dba381c4a83f/4b3addd0-22b0-4793-af75-dba381c4a83f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.346474] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed35aba9-def8-4e90-9cc0-5c51f21ce06a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.359021] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 970.359021] env[69994]: value = "task-3242197" [ 970.359021] env[69994]: _type = "Task" [ 970.359021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.376044] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242197, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.388593] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242196, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072996} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.389234] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 970.390552] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b968bf-fe98-4837-92d1-ddbc7341455c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.418941] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.419774] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5527c2ae-fe19-4f45-b522-10bdd62e133e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.447032] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 970.447032] env[69994]: value = "task-3242198" [ 970.447032] env[69994]: _type = "Task" [ 970.447032] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.464886] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242198, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.746659] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6a600a-aa8a-47bd-a68d-17888e7bb1f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.770307] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e066637-5656-4feb-9cc0-5caac7b365a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.815072] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.815946] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15af307-af70-4523-92dc-edb405ccab7f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.826572] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9bb3c9-8ad7-463a-a7c3-80f967489e82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.842306] env[69994]: DEBUG nova.compute.provider_tree [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.869801] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242197, 'name': Rename_Task, 'duration_secs': 0.167361} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.870509] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.870509] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e8ef18f-1b94-45ce-bb64-162d292eaa8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.878141] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 970.878141] env[69994]: value = "task-3242199" [ 970.878141] env[69994]: _type = "Task" [ 970.878141] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.890467] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242199, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.956533] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242198, 'name': ReconfigVM_Task, 'duration_secs': 0.31215} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.959977] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Reconfigured VM instance instance-0000004c to attach disk [datastore2] ee7e0c02-ef19-4475-a936-f591c8185797/ee7e0c02-ef19-4475-a936-f591c8185797.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.959977] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-893dc667-2ac4-4907-a59a-d759535bab9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.969109] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 970.969109] env[69994]: value = "task-3242200" [ 970.969109] env[69994]: _type = "Task" [ 970.969109] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.980074] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242200, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.983768] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.372932] env[69994]: ERROR nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [req-f04ef9b1-8002-428a-ac42-02974cb724ba] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f04ef9b1-8002-428a-ac42-02974cb724ba"}]} [ 971.393455] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242199, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.397913] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 971.413983] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 971.414252] env[69994]: DEBUG nova.compute.provider_tree [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 971.436640] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 971.470039] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 971.484335] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242200, 'name': Rename_Task, 'duration_secs': 0.141821} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.484741] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 971.485056] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1bb2000-121b-4687-8ad4-cad5abc39646 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.494840] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 971.494840] env[69994]: value = "task-3242201" [ 971.494840] env[69994]: _type = "Task" [ 971.494840] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.511907] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242201, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.844145] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d8cd3b-2e37-469e-9ce2-87682fab8750 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.851890] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a326a4-5305-4742-a6d6-447073a687f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.909086] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Successfully updated port: c14a1492-8af8-4c93-bf0d-f2424cd1f335 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.920640] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf284a7-a692-4222-8b8f-cf44e4100af2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.936575] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242199, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.936575] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200e972a-174e-496c-903c-923f73d662e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.956194] env[69994]: DEBUG nova.compute.provider_tree [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.010482] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242201, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.078206] env[69994]: DEBUG nova.compute.manager [req-9e8538da-22bb-41c8-b181-1722696ce0fb req-4ba97941-da26-4828-b00c-b723699c227b service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-vif-plugged-c14a1492-8af8-4c93-bf0d-f2424cd1f335 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.078206] env[69994]: DEBUG oslo_concurrency.lockutils [req-9e8538da-22bb-41c8-b181-1722696ce0fb req-4ba97941-da26-4828-b00c-b723699c227b service nova] Acquiring lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.078206] env[69994]: DEBUG oslo_concurrency.lockutils [req-9e8538da-22bb-41c8-b181-1722696ce0fb req-4ba97941-da26-4828-b00c-b723699c227b service nova] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.078206] env[69994]: DEBUG oslo_concurrency.lockutils [req-9e8538da-22bb-41c8-b181-1722696ce0fb req-4ba97941-da26-4828-b00c-b723699c227b service nova] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.078206] env[69994]: DEBUG nova.compute.manager [req-9e8538da-22bb-41c8-b181-1722696ce0fb req-4ba97941-da26-4828-b00c-b723699c227b service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] No waiting events found dispatching network-vif-plugged-c14a1492-8af8-4c93-bf0d-f2424cd1f335 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 972.078206] env[69994]: WARNING nova.compute.manager [req-9e8538da-22bb-41c8-b181-1722696ce0fb req-4ba97941-da26-4828-b00c-b723699c227b service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received unexpected event network-vif-plugged-c14a1492-8af8-4c93-bf0d-f2424cd1f335 for instance with vm_state building and task_state spawning. [ 972.195708] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52454f7c-4589-4469-55b8-eced386046ae/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 972.196824] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef073651-279c-416d-8f46-1e1246b67da8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.204472] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52454f7c-4589-4469-55b8-eced386046ae/disk-0.vmdk is in state: ready. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 972.204650] env[69994]: ERROR oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52454f7c-4589-4469-55b8-eced386046ae/disk-0.vmdk due to incomplete transfer. [ 972.204907] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c323d355-7fb8-4641-a615-1991eba5f63e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.213517] env[69994]: DEBUG oslo_vmware.rw_handles [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52454f7c-4589-4469-55b8-eced386046ae/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 972.213727] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Uploaded image 0c454b0e-ff19-4782-b301-ec0768462b7b to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 972.216220] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 972.217151] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-299a3822-64bc-4454-b51a-54d15ccd83a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.225457] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 972.225457] env[69994]: value = "task-3242202" [ 972.225457] env[69994]: _type = "Task" [ 972.225457] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.234855] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242202, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.418076] env[69994]: DEBUG oslo_vmware.api [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242199, 'name': PowerOnVM_Task, 'duration_secs': 1.067583} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.418371] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.418655] env[69994]: DEBUG nova.compute.manager [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.428165] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df99136-6261-41bf-8c28-41e7e64051af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.479417] env[69994]: ERROR nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [req-2e859f50-cb09-4b4d-bcd1-f438a24c74eb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2e859f50-cb09-4b4d-bcd1-f438a24c74eb"}]} [ 972.503736] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 972.514046] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242201, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.514937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "290e8749-6860-4303-b966-65d2efee5499" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.515363] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.523496] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 972.523713] env[69994]: DEBUG nova.compute.provider_tree [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.543948] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 972.566498] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 972.743577] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 
tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242202, 'name': Destroy_Task, 'duration_secs': 0.45031} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.744158] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Destroyed the VM [ 972.744158] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 972.745073] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5284b369-906c-419a-bfc1-868680a80c1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.750991] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 972.750991] env[69994]: value = "task-3242203" [ 972.750991] env[69994]: _type = "Task" [ 972.750991] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.759202] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242203, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.842361] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff308f6-3bb7-4ff0-9d2e-fb812f1b5f9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.850953] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d242121-8bca-4b7d-af89-d075e2a328c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.884399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333e1bbf-40fb-452a-af44-382f6eea3d75 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.892214] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae61f4c-e635-48cf-9bf5-545fe713d00b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.907048] env[69994]: DEBUG nova.compute.provider_tree [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.949747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.006934] env[69994]: DEBUG oslo_vmware.api [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242201, 'name': PowerOnVM_Task, 'duration_secs': 1.049121} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.007520] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 973.007795] env[69994]: DEBUG nova.compute.manager [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.008680] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01aa7284-b288-422a-9398-def60a8774a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.017789] env[69994]: DEBUG nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 973.263026] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242203, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.450221] env[69994]: DEBUG nova.scheduler.client.report [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 973.450361] env[69994]: DEBUG nova.compute.provider_tree [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 104 to 105 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 973.450645] env[69994]: DEBUG nova.compute.provider_tree [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.531423] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.546963] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.700250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "ee7e0c02-ef19-4475-a936-f591c8185797" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.700512] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "ee7e0c02-ef19-4475-a936-f591c8185797" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.700752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "ee7e0c02-ef19-4475-a936-f591c8185797-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.700952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "ee7e0c02-ef19-4475-a936-f591c8185797-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.701136] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "ee7e0c02-ef19-4475-a936-f591c8185797-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.703224] env[69994]: INFO nova.compute.manager [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Terminating instance [ 973.761963] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 
tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242203, 'name': RemoveSnapshot_Task, 'duration_secs': 0.745487} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.762231] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 973.762523] env[69994]: DEBUG nova.compute.manager [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.763359] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de10a6a8-0ff9-4205-b6ff-6024351c9898 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.959021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.529s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.959021] env[69994]: DEBUG nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 973.960877] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.356s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.962944] env[69994]: INFO nova.compute.claims [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.040991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "4b3addd0-22b0-4793-af75-dba381c4a83f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.042029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.042029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "4b3addd0-22b0-4793-af75-dba381c4a83f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.042029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.042029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.043960] env[69994]: INFO nova.compute.manager [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Terminating instance [ 974.206826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 
tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "refresh_cache-ee7e0c02-ef19-4475-a936-f591c8185797" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.207031] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquired lock "refresh_cache-ee7e0c02-ef19-4475-a936-f591c8185797" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.207211] env[69994]: DEBUG nova.network.neutron [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 974.279228] env[69994]: INFO nova.compute.manager [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Shelve offloading [ 974.301935] env[69994]: DEBUG nova.compute.manager [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-changed-c14a1492-8af8-4c93-bf0d-f2424cd1f335 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 974.302285] env[69994]: DEBUG nova.compute.manager [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Refreshing instance network info cache due to event network-changed-c14a1492-8af8-4c93-bf0d-f2424cd1f335. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 974.302404] env[69994]: DEBUG oslo_concurrency.lockutils [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] Acquiring lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.302543] env[69994]: DEBUG oslo_concurrency.lockutils [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] Acquired lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.302685] env[69994]: DEBUG nova.network.neutron [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Refreshing network info cache for port c14a1492-8af8-4c93-bf0d-f2424cd1f335 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.471310] env[69994]: DEBUG nova.compute.utils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 974.476024] env[69994]: DEBUG nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 974.476024] env[69994]: DEBUG nova.network.neutron [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 974.529609] env[69994]: DEBUG nova.policy [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0cfc9192b337409aaf6be3b7bb48578f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd55223bc0a464f1fa4d3b200926fd64f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 974.550754] env[69994]: DEBUG nova.compute.manager [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 974.551728] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 974.553758] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806b305a-d913-4b08-aac9-c92125ca5d37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.561175] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 974.561439] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca7a169c-c46a-44a8-ae1f-d39881ebfc1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.564973] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Successfully updated port: aa9589d7-176c-4249-9a3a-0af202829e70 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 974.572619] env[69994]: DEBUG oslo_vmware.api [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 974.572619] env[69994]: value = "task-3242204" [ 974.572619] env[69994]: _type = "Task" [ 974.572619] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.582770] env[69994]: DEBUG oslo_vmware.api [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242204, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.602120] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "8f5a5852-cd78-434f-b413-3cc2314575bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.602759] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.742202] env[69994]: DEBUG nova.network.neutron [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.784207] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 974.784770] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6fb9186-a39f-425e-8c23-6f581969f45c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.796030] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 974.796030] env[69994]: value = "task-3242205" [ 974.796030] env[69994]: _type = "Task" [ 974.796030] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.807026] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 974.807026] env[69994]: DEBUG nova.compute.manager [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.807026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be87c086-b7e6-4046-9c13-ee34e9213e19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.813364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.814053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.814517] env[69994]: DEBUG nova.network.neutron [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 974.831873] env[69994]: DEBUG nova.network.neutron [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Successfully created port: d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 974.860968] env[69994]: DEBUG nova.network.neutron [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.894686] env[69994]: DEBUG nova.network.neutron [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.980659] env[69994]: DEBUG nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 975.022588] env[69994]: DEBUG nova.network.neutron [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.088993] env[69994]: DEBUG oslo_vmware.api [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242204, 'name': PowerOffVM_Task, 'duration_secs': 0.222369} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.089999] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.089999] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.089999] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27c6797c-9d8d-49fa-8185-e45f5bcbc182 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.104572] env[69994]: DEBUG nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 975.154473] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.154756] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.154975] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleting the datastore file [datastore2] 4b3addd0-22b0-4793-af75-dba381c4a83f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.155518] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22ac4b6d-04be-4ca3-a588-ae34e35f99c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.165762] env[69994]: DEBUG oslo_vmware.api [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 975.165762] env[69994]: value = "task-3242207" [ 975.165762] env[69994]: _type = "Task" [ 975.165762] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.174611] env[69994]: DEBUG oslo_vmware.api [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242207, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.298811] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c09532d-001d-4e14-af05-38efa00c1356 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.308155] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cca718-9412-42bd-a5c1-9a8f5dfc4be3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.346578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103d3d0a-ed30-419e-8d53-5468da3471ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.354695] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a8a89c-b62a-41a8-9b65-597dd3bb0190 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.374021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Releasing lock "refresh_cache-ee7e0c02-ef19-4475-a936-f591c8185797" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.377086] env[69994]: DEBUG nova.compute.manager [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 975.377086] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.377086] env[69994]: DEBUG nova.compute.provider_tree [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 975.377086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8093da5d-4f3e-4f05-b3fd-bee34a801562 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.385497] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.385747] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5893f240-6807-45b6-a0ed-772478bc8a83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.393558] env[69994]: DEBUG oslo_vmware.api [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 975.393558] env[69994]: value = "task-3242208" [ 975.393558] env[69994]: _type = "Task" [ 975.393558] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.402693] env[69994]: DEBUG oslo_vmware.api [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242208, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.529057] env[69994]: DEBUG oslo_concurrency.lockutils [req-bac1ff13-9b0b-4035-942c-b8ffee15453a req-24a19773-395d-4c92-974c-adfa2f9a48db service nova] Releasing lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.569900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.570191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.570373] env[69994]: DEBUG nova.compute.manager [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 975.571850] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9da732b-b4fe-41f7-9441-36aa0d2c02ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.578344] env[69994]: DEBUG nova.compute.manager [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 975.582023] env[69994]: DEBUG nova.objects.instance [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'flavor' on Instance uuid eff21ec5-a51d-4004-9edf-1891f706fe9c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.626792] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.678338] env[69994]: DEBUG oslo_vmware.api [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143968} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.678338] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.678338] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.678338] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.678338] env[69994]: INFO nova.compute.manager [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 975.678338] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 975.678338] env[69994]: DEBUG nova.compute.manager [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 975.678338] env[69994]: DEBUG nova.network.neutron [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 975.902927] env[69994]: DEBUG oslo_vmware.api [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242208, 'name': PowerOffVM_Task, 'duration_secs': 0.167708} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.903247] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.903506] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.903887] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3c18f2e-f31b-44e7-815b-d83c564c67ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.910478] env[69994]: ERROR nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [req-11e9b2f4-ba2a-46a0-9629-248c4c99f50b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-11e9b2f4-ba2a-46a0-9629-248c4c99f50b"}]} [ 975.930170] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.931082] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.931082] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Deleting the datastore file [datastore2] ee7e0c02-ef19-4475-a936-f591c8185797 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.931082] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1af79acd-a1a3-4cb9-9ea2-76ab01874dd1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.935085] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 975.940873] env[69994]: DEBUG oslo_vmware.api [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for the task: (returnval){ [ 975.940873] env[69994]: value = "task-3242210" [ 975.940873] env[69994]: _type = "Task" [ 975.940873] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.954354] env[69994]: DEBUG oslo_vmware.api [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.955411] env[69994]: DEBUG nova.network.neutron [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Updating instance_info_cache with network_info: [{"id": "b3052355-2e24-4ec5-9b33-231dad5489a5", "address": "fa:16:3e:be:67:b0", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3052355-2e", "ovs_interfaceid": "b3052355-2e24-4ec5-9b33-231dad5489a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.962305] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 975.962667] env[69994]: DEBUG nova.compute.provider_tree [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 975.980284] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 975.992454] env[69994]: DEBUG nova.compute.manager [req-461f5d97-9e98-4179-9781-f1fcdb1d93da req-9cc92855-f282-4f41-bbb8-1d0f6bd4c1ec service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Received event network-vif-deleted-acadedcd-b660-437d-be25-176371cec4b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.993025] env[69994]: INFO nova.compute.manager [req-461f5d97-9e98-4179-9781-f1fcdb1d93da req-9cc92855-f282-4f41-bbb8-1d0f6bd4c1ec service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Neutron deleted interface acadedcd-b660-437d-be25-176371cec4b3; detaching it from the instance and deleting it from the info cache [ 975.993025] env[69994]: DEBUG nova.network.neutron [req-461f5d97-9e98-4179-9781-f1fcdb1d93da req-9cc92855-f282-4f41-bbb8-1d0f6bd4c1ec service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.996505] env[69994]: DEBUG nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 976.007987] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 976.025338] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 976.025589] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.025737] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 976.025914] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.026132] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 976.026219] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 976.026424] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 976.026583] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 976.026750] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 976.026975] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 976.027104] env[69994]: DEBUG nova.virt.hardware [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 976.028240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895c16ef-dfec-462f-a31d-f9546652f6ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.041969] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568be790-5850-4b38-b19e-c4a3c29ae6b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.313978] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e2fceb-6959-410a-89f8-c9ff71900244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.321331] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaeb3fec-7776-4184-8fbe-0f731b32ce82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.351227] env[69994]: DEBUG nova.network.neutron [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Successfully updated port: d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 976.352705] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab991870-ea76-4c7a-89b3-aa74cb6ec42e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.361040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a18d73-ad30-43d9-883e-cddfd3937e84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.374591] env[69994]: DEBUG nova.compute.provider_tree [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 976.389139] env[69994]: DEBUG nova.compute.manager [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-vif-plugged-aa9589d7-176c-4249-9a3a-0af202829e70 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.389355] env[69994]: DEBUG oslo_concurrency.lockutils [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] Acquiring lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.389678] env[69994]: DEBUG oslo_concurrency.lockutils [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.389784] env[69994]: DEBUG oslo_concurrency.lockutils [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.389950] env[69994]: DEBUG nova.compute.manager [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] No waiting events found dispatching network-vif-plugged-aa9589d7-176c-4249-9a3a-0af202829e70 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 976.390142] env[69994]: WARNING nova.compute.manager [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received unexpected event network-vif-plugged-aa9589d7-176c-4249-9a3a-0af202829e70 for instance with vm_state building and task_state spawning. [ 976.390311] env[69994]: DEBUG nova.compute.manager [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-changed-aa9589d7-176c-4249-9a3a-0af202829e70 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.390460] env[69994]: DEBUG nova.compute.manager [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Refreshing instance network info cache due to event network-changed-aa9589d7-176c-4249-9a3a-0af202829e70. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 976.390639] env[69994]: DEBUG oslo_concurrency.lockutils [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] Acquiring lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.390788] env[69994]: DEBUG oslo_concurrency.lockutils [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] Acquired lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.390951] env[69994]: DEBUG nova.network.neutron [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Refreshing network info cache for port aa9589d7-176c-4249-9a3a-0af202829e70 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.451598] env[69994]: DEBUG oslo_vmware.api [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Task: {'id': task-3242210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100665} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.451874] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.452012] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 976.452242] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 976.452472] env[69994]: INFO nova.compute.manager [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Took 1.08 seconds to destroy the instance on the hypervisor. [ 976.452747] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.453008] env[69994]: DEBUG nova.compute.manager [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 976.453463] env[69994]: DEBUG nova.network.neutron [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 976.458344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.473596] env[69994]: DEBUG nova.network.neutron [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.484927] env[69994]: DEBUG nova.network.neutron [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.500166] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-273cd033-cbf8-45b6-8587-2909492bcfd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.516819] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd7b02a-aa0a-40b1-8386-9a89ec88f5f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.555640] env[69994]: DEBUG nova.compute.manager [req-461f5d97-9e98-4179-9781-f1fcdb1d93da req-9cc92855-f282-4f41-bbb8-1d0f6bd4c1ec service nova] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Detach interface failed, port_id=acadedcd-b660-437d-be25-176371cec4b3, reason: Instance 4b3addd0-22b0-4793-af75-dba381c4a83f could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 976.588405] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 976.589521] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7cfb2006-43ac-48c2-9725-48cfed2e39e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.597830] env[69994]: DEBUG oslo_vmware.api [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 976.597830] env[69994]: value = "task-3242211" [ 976.597830] env[69994]: _type = "Task" [ 976.597830] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.609556] env[69994]: DEBUG oslo_vmware.api [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242211, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.747091] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 976.748083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b182cc66-02a0-4fc6-a6e8-d86101889ef0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.755641] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 976.755872] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18528818-d7a6-4ff9-a9f3-db8616f81f0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.820600] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 976.820852] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 976.821059] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleting the datastore file [datastore1] e638fe4f-5f75-4d38-8a58-15dd66fd9e27 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.821336] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80b738fe-92f1-4d0d-8eba-b28a2a496361 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.828318] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 976.828318] env[69994]: value = "task-3242213" [ 976.828318] env[69994]: _type = "Task" [ 976.828318] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.836080] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242213, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.856830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.856975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquired lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.857155] env[69994]: DEBUG nova.network.neutron [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.894416] env[69994]: ERROR nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [req-65edb070-e51b-45cf-851c-cac8e9a53091] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-65edb070-e51b-45cf-851c-cac8e9a53091"}]} [ 976.913854] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 976.927414] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 976.927642] env[69994]: DEBUG nova.compute.provider_tree [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 976.935058] env[69994]: DEBUG nova.network.neutron [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.941904] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 976.959013] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 976.978026] env[69994]: INFO nova.compute.manager [-] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Took 1.30 seconds to deallocate network for instance. [ 976.988158] env[69994]: DEBUG nova.network.neutron [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.016489] env[69994]: DEBUG nova.network.neutron [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.113997] env[69994]: DEBUG oslo_vmware.api [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242211, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.145804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.146146] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.273034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c024a2-65c5-44c2-9feb-493281610459 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.281418] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebde8097-97ce-4500-9d2e-a0c24b6fb101 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.319301] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89cb991e-cc29-4cd8-888e-80d8a84ae6c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.327333] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de84f80-0a9e-4c60-8a71-099fbbf472df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.343518] env[69994]: DEBUG nova.compute.provider_tree [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 977.348350] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Successfully updated port: f249cccf-9606-4021-91bd-19028a6c4cbf {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.349409] env[69994]: DEBUG oslo_vmware.api [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242213, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.124857} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.349867] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.350067] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.350263] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.377193] env[69994]: INFO nova.scheduler.client.report [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted allocations for instance e638fe4f-5f75-4d38-8a58-15dd66fd9e27 [ 977.394940] env[69994]: DEBUG nova.network.neutron [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 977.488016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.491695] env[69994]: INFO nova.compute.manager [-] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Took 1.04 seconds to deallocate network for instance. [ 977.519425] env[69994]: DEBUG oslo_concurrency.lockutils [req-c7f706a2-c271-4fb7-a595-bd07dbc7df19 req-283c731b-08b3-4024-a221-aac2cb58e466 service nova] Releasing lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.611846] env[69994]: DEBUG oslo_vmware.api [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242211, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.626754] env[69994]: DEBUG nova.network.neutron [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Updating instance_info_cache with network_info: [{"id": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "address": "fa:16:3e:40:0c:90", "network": {"id": "740b7ac0-1366-44a5-8ce6-82ec9b338dce", "bridge": "br-int", "label": "tempest-ServersTestJSON-1662708788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55223bc0a464f1fa4d3b200926fd64f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4a8692e-1b", "ovs_interfaceid": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.648453] env[69994]: DEBUG nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 977.854200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.854336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.854489] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.883050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.884280] env[69994]: DEBUG nova.scheduler.client.report [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 977.886018] env[69994]: DEBUG nova.compute.provider_tree [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 107 to 108 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 977.886018] env[69994]: DEBUG nova.compute.provider_tree [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.001530] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.043278] env[69994]: INFO nova.compute.manager [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Rebuilding instance [ 978.079981] env[69994]: DEBUG nova.compute.manager [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 978.080883] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025f21ef-076c-4ac7-ba33-fb29dbd68931 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.108900] env[69994]: DEBUG oslo_vmware.api [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242211, 'name': PowerOffVM_Task, 'duration_secs': 1.016749} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.109235] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 978.109473] env[69994]: DEBUG nova.compute.manager [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 978.110293] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9624ae0-750b-495c-8d46-2f2f139ea02e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.120482] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Received event network-vif-plugged-d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.120739] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Acquiring lock "850930f9-d5fb-4546-9796-30e164a1cdd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.121013] env[69994]: DEBUG oslo_concurrency.lockutils 
[req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.121245] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.121464] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] No waiting events found dispatching network-vif-plugged-d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 978.121715] env[69994]: WARNING nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Received unexpected event network-vif-plugged-d4a8692e-1b97-42dd-a02a-53c07d85ad0a for instance with vm_state building and task_state spawning. [ 978.121925] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Received event network-changed-d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.122134] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Refreshing instance network info cache due to event network-changed-d4a8692e-1b97-42dd-a02a-53c07d85ad0a. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 978.122332] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Acquiring lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.128826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Releasing lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.129111] env[69994]: DEBUG nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Instance network_info: |[{"id": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "address": "fa:16:3e:40:0c:90", "network": {"id": "740b7ac0-1366-44a5-8ce6-82ec9b338dce", "bridge": "br-int", "label": "tempest-ServersTestJSON-1662708788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55223bc0a464f1fa4d3b200926fd64f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4a8692e-1b", "ovs_interfaceid": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 978.129447] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Acquired lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.129524] env[69994]: DEBUG nova.network.neutron [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Refreshing network info cache for port d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.131646] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:0c:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '352165bb-004f-4180-9627-3a275dbe18af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4a8692e-1b97-42dd-a02a-53c07d85ad0a', 'vif_model': 'vmxnet3'}] 
{{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 978.138910] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Creating folder: Project (d55223bc0a464f1fa4d3b200926fd64f). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 978.141675] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55554cbc-31a7-4523-b732-571137bc6bb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.155384] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Created folder: Project (d55223bc0a464f1fa4d3b200926fd64f) in parent group-v647729. [ 978.155566] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Creating folder: Instances. Parent ref: group-v647954. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 978.157649] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4c06fb3-0d81-42ce-835c-442aa41778e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.168203] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Created folder: Instances in parent group-v647954. [ 978.168391] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 978.169321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.169556] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 978.169794] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df173978-dd20-4189-8b42-ccce44d0376a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.191181] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 978.191181] env[69994]: value = "task-3242216" [ 978.191181] env[69994]: _type = "Task" [ 978.191181] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.200603] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242216, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.390410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.429s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.390994] env[69994]: DEBUG nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 978.394816] env[69994]: DEBUG nova.network.neutron [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Updated VIF entry in instance network info cache for port d4a8692e-1b97-42dd-a02a-53c07d85ad0a. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 978.395207] env[69994]: DEBUG nova.network.neutron [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Updating instance_info_cache with network_info: [{"id": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "address": "fa:16:3e:40:0c:90", "network": {"id": "740b7ac0-1366-44a5-8ce6-82ec9b338dce", "bridge": "br-int", "label": "tempest-ServersTestJSON-1662708788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55223bc0a464f1fa4d3b200926fd64f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4a8692e-1b", "ovs_interfaceid": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.397703] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.930s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.397703] env[69994]: DEBUG nova.objects.instance [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lazy-loading 'resources' on Instance uuid e8caf244-413b-49bb-bdff-79aca0ccbc2b {{(pid=69994) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 978.402241] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.417617] env[69994]: DEBUG nova.compute.manager [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-vif-plugged-f249cccf-9606-4021-91bd-19028a6c4cbf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.417795] env[69994]: DEBUG oslo_concurrency.lockutils [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] Acquiring lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.417982] env[69994]: DEBUG oslo_concurrency.lockutils [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.418200] env[69994]: DEBUG oslo_concurrency.lockutils [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.418375] env[69994]: DEBUG nova.compute.manager [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] No waiting events found dispatching network-vif-plugged-f249cccf-9606-4021-91bd-19028a6c4cbf {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 978.418539] env[69994]: WARNING nova.compute.manager [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received unexpected event network-vif-plugged-f249cccf-9606-4021-91bd-19028a6c4cbf for instance with vm_state building and task_state spawning. [ 978.418696] env[69994]: DEBUG nova.compute.manager [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-changed-f249cccf-9606-4021-91bd-19028a6c4cbf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.418848] env[69994]: DEBUG nova.compute.manager [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Refreshing instance network info cache due to event network-changed-f249cccf-9606-4021-91bd-19028a6c4cbf. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 978.419027] env[69994]: DEBUG oslo_concurrency.lockutils [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] Acquiring lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.493522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.626364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-00e53e0b-c53a-486a-b8a6-7cf5ab725238 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.056s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.701612] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242216, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.790139] env[69994]: DEBUG nova.network.neutron [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Updating instance_info_cache with network_info: [{"id": "c14a1492-8af8-4c93-bf0d-f2424cd1f335", "address": "fa:16:3e:d2:d2:d5", "network": {"id": "b68748e7-e4b6-4161-a5dd-c82ac4b20bd5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1602709496", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc14a1492-8a", "ovs_interfaceid": "c14a1492-8af8-4c93-bf0d-f2424cd1f335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa9589d7-176c-4249-9a3a-0af202829e70", "address": "fa:16:3e:98:45:48", "network": {"id": "efae8434-ebdb-448a-af54-87ec53312bb9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-389270970", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": 
false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9589d7-17", "ovs_interfaceid": "aa9589d7-176c-4249-9a3a-0af202829e70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f249cccf-9606-4021-91bd-19028a6c4cbf", "address": "fa:16:3e:54:0f:e4", "network": {"id": "b68748e7-e4b6-4161-a5dd-c82ac4b20bd5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1602709496", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf249cccf-96", "ovs_interfaceid": "f249cccf-9606-4021-91bd-19028a6c4cbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.898625] env[69994]: DEBUG nova.compute.utils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 978.900271] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Releasing lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.900502] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Received event network-vif-unplugged-b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.900693] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Acquiring lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.900905] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
:: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.905020] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.905020] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] No waiting events found dispatching network-vif-unplugged-b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 978.905020] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Received event network-vif-unplugged-b3052355-2e24-4ec5-9b33-231dad5489a5 for instance with task_state deleting. {{(pid=69994) _process_instance_event /opt/stack/nova/nova/compute/manager.py:11515}} [ 978.905020] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Received event network-changed-b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.905020] env[69994]: DEBUG nova.compute.manager [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Refreshing instance network info cache due to event network-changed-b3052355-2e24-4ec5-9b33-231dad5489a5. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 978.905020] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Acquiring lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.905020] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Acquired lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.905020] env[69994]: DEBUG nova.network.neutron [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Refreshing network info cache for port b3052355-2e24-4ec5-9b33-231dad5489a5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.905020] env[69994]: DEBUG nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 978.905020] env[69994]: DEBUG nova.network.neutron [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 978.944264] env[69994]: DEBUG nova.policy [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f2b4659f30f4b9db4627d3d3abb6ba5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '605d72502cc644bfa4d875bf348246de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 979.092972] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.093311] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b061b8a-d9f4-4897-bf2c-584dc41fc2c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.104665] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 979.104665] env[69994]: value = "task-3242217" [ 979.104665] env[69994]: _type = "Task" [ 979.104665] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.115095] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242217, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.169594] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f287e2-9adc-4bac-99c6-7d02ff03499c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.175096] env[69994]: DEBUG nova.objects.instance [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'flavor' on Instance uuid eff21ec5-a51d-4004-9edf-1891f706fe9c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.179636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d7c7d9-2167-482a-9d36-a875c6a4de9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.217746] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25defc64-deb8-4f27-a590-4f7f799a25a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.225753] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242216, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.228952] env[69994]: DEBUG nova.network.neutron [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Successfully created port: b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.231406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682d190e-7f90-4a40-83b5-a2e7351f010f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.245304] env[69994]: DEBUG nova.compute.provider_tree [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.293147] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Releasing lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.293557] env[69994]: DEBUG nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Instance network_info: |[{"id": "c14a1492-8af8-4c93-bf0d-f2424cd1f335", "address": "fa:16:3e:d2:d2:d5", "network": {"id": "b68748e7-e4b6-4161-a5dd-c82ac4b20bd5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1602709496", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc14a1492-8a", "ovs_interfaceid": "c14a1492-8af8-4c93-bf0d-f2424cd1f335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa9589d7-176c-4249-9a3a-0af202829e70", "address": "fa:16:3e:98:45:48", "network": {"id": "efae8434-ebdb-448a-af54-87ec53312bb9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-389270970", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9589d7-17", "ovs_interfaceid": "aa9589d7-176c-4249-9a3a-0af202829e70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f249cccf-9606-4021-91bd-19028a6c4cbf", "address": "fa:16:3e:54:0f:e4", "network": {"id": "b68748e7-e4b6-4161-a5dd-c82ac4b20bd5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1602709496", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf249cccf-96", "ovs_interfaceid": "f249cccf-9606-4021-91bd-19028a6c4cbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 979.293847] env[69994]: DEBUG oslo_concurrency.lockutils [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] Acquired lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
979.294047] env[69994]: DEBUG nova.network.neutron [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Refreshing network info cache for port f249cccf-9606-4021-91bd-19028a6c4cbf {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 979.295221] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:d2:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c14a1492-8af8-4c93-bf0d-f2424cd1f335', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:45:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa9589d7-176c-4249-9a3a-0af202829e70', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:0f:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f249cccf-9606-4021-91bd-19028a6c4cbf', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.307171] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Creating folder: Project (978da68b62d8409da5d8c8a45cd985c0). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.314020] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ad57925-fb70-4c69-9473-21c6f1efbbd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.321883] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Created folder: Project (978da68b62d8409da5d8c8a45cd985c0) in parent group-v647729. [ 979.322981] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Creating folder: Instances. Parent ref: group-v647957. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.323259] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4b4ee62-1295-4636-890b-77cda797aa19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.331914] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Created folder: Instances in parent group-v647957. 
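The folder-creation and CreateVM_Task entries above all go through the same oslo.vmware call pattern: a VMwareAPISession invokes a vSphere method via invoke_api(), and asynchronous methods hand back a task reference that wait_for_task() polls until the "completed successfully" entries appear. A minimal sketch of that pattern follows, assuming a placeholder vCenter host, credentials and folder name; it is an illustration of the library calls visible in the log, not Nova's own driver code.

# Sketch only: placeholder host/credentials, not the real environment from this log.
from oslo_vmware import api

session = api.VMwareAPISession('vc1.example.test', 'user', 'password',
                               api_retry_count=10, task_poll_interval=0.5)

# Synchronous call: CreateFolder returns the new folder's managed object
# reference directly, with no task involved.
root_folder = session.vim.service_content.rootFolder
project_folder = session.invoke_api(session.vim, 'CreateFolder',
                                    root_folder, name='Project (example)')

# Asynchronous call: CreateVM_Task returns a task reference; wait_for_task()
# polls it (the "progress is 0%" / "completed successfully" entries above) and
# returns the task info, whose .result is the new VM. The config_spec,
# resource_pool and host arguments are built by nova.virt.vmwareapi.vm_util in
# the real driver and are omitted here, so the calls are left commented out.
# task_ref = session.invoke_api(session.vim, 'CreateVM_Task', project_folder,
#                               config=config_spec, pool=resource_pool, host=host)
# task_info = session.wait_for_task(task_ref)
# vm_ref = task_info.result

In the driver itself this pattern is wrapped by nova.virt.vmwareapi.vm_util.create_folder() and create_vm(), which is where the "Creating folder" and "Creating VM on the ESX host" entries in this log originate.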
[ 979.331914] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 979.332545] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.332727] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1eb594bf-0e23-4c66-a516-ff89ad50c657 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.358770] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.358770] env[69994]: value = "task-3242220" [ 979.358770] env[69994]: _type = "Task" [ 979.358770] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.366296] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242220, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.409575] env[69994]: DEBUG nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 979.616919] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.634594] env[69994]: DEBUG nova.network.neutron [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Updated VIF entry in instance network info cache for port f249cccf-9606-4021-91bd-19028a6c4cbf. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.635266] env[69994]: DEBUG nova.network.neutron [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Updating instance_info_cache with network_info: [{"id": "c14a1492-8af8-4c93-bf0d-f2424cd1f335", "address": "fa:16:3e:d2:d2:d5", "network": {"id": "b68748e7-e4b6-4161-a5dd-c82ac4b20bd5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1602709496", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc14a1492-8a", "ovs_interfaceid": "c14a1492-8af8-4c93-bf0d-f2424cd1f335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa9589d7-176c-4249-9a3a-0af202829e70", "address": "fa:16:3e:98:45:48", "network": {"id": "efae8434-ebdb-448a-af54-87ec53312bb9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-389270970", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa9589d7-17", "ovs_interfaceid": "aa9589d7-176c-4249-9a3a-0af202829e70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f249cccf-9606-4021-91bd-19028a6c4cbf", "address": "fa:16:3e:54:0f:e4", "network": {"id": "b68748e7-e4b6-4161-a5dd-c82ac4b20bd5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1602709496", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", 
"segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf249cccf-96", "ovs_interfaceid": "f249cccf-9606-4021-91bd-19028a6c4cbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.653180] env[69994]: DEBUG nova.network.neutron [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Updated VIF entry in instance network info cache for port b3052355-2e24-4ec5-9b33-231dad5489a5. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.653560] env[69994]: DEBUG nova.network.neutron [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Updating instance_info_cache with network_info: [{"id": "b3052355-2e24-4ec5-9b33-231dad5489a5", "address": "fa:16:3e:be:67:b0", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": null, "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb3052355-2e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.682154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.682341] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.682558] env[69994]: DEBUG nova.network.neutron [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 979.682892] env[69994]: DEBUG nova.objects.instance [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'info_cache' on Instance uuid eff21ec5-a51d-4004-9edf-1891f706fe9c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.722809] 
env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242216, 'name': CreateVM_Task, 'duration_secs': 1.455226} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.722992] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.723623] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.723794] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.724150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 979.724400] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4364d81e-9c48-47b7-9475-1d5a1f68c1f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.729169] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 979.729169] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528e752e-554b-585c-a1b7-d5bcbd270d01" [ 979.729169] env[69994]: _type = "Task" [ 979.729169] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.736837] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528e752e-554b-585c-a1b7-d5bcbd270d01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.747830] env[69994]: DEBUG nova.scheduler.client.report [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.869080] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242220, 'name': CreateVM_Task, 'duration_secs': 0.471941} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.869217] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.869948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.115791] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242217, 'name': PowerOffVM_Task, 'duration_secs': 0.594629} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.116076] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 980.116760] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.117037] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c18de802-89cc-4d12-a577-7f1c72374b2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.123874] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 980.123874] env[69994]: value = "task-3242221" [ 980.123874] env[69994]: _type = "Task" [ 980.123874] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.131296] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242221, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.140906] env[69994]: DEBUG oslo_concurrency.lockutils [req-c161f8bc-1536-48a3-a2f1-e40e26492976 req-045f93ad-027c-4df7-b063-0dbc97722e67 service nova] Releasing lock "refresh_cache-0d42c1c7-2ac1-44f3-8311-929f141e0a65" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.156624] env[69994]: DEBUG oslo_concurrency.lockutils [req-aaa1e205-16f6-4031-b8fd-1f4062b03b21 req-145485df-686b-420b-9926-32b3898a9015 service nova] Releasing lock "refresh_cache-e638fe4f-5f75-4d38-8a58-15dd66fd9e27" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.186532] env[69994]: DEBUG nova.objects.base [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 980.239608] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528e752e-554b-585c-a1b7-d5bcbd270d01, 'name': SearchDatastore_Task, 'duration_secs': 0.008907} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.239959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.240237] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.240479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.240624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.240829] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.241188] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.241525] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 980.241821] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f899456-29e7-46d0-ad31-9570e9b570ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.243692] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-071cb230-8544-4ce5-919a-18b4aa6bc926 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.248748] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 
tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 980.248748] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528820cd-1b16-87ce-e4ba-97dfd2a53529" [ 980.248748] env[69994]: _type = "Task" [ 980.248748] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.252940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.254751] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.254922] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.255834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.441s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.257239] env[69994]: INFO nova.compute.claims [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 980.260213] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68d6db80-208d-4cb2-8d41-ba218ac60c35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.266239] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528820cd-1b16-87ce-e4ba-97dfd2a53529, 'name': SearchDatastore_Task, 'duration_secs': 0.00874} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.266954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.267047] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.267279] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.269742] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 980.269742] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5248bbed-380e-15bb-9378-d1ed997d23e2" [ 980.269742] env[69994]: _type = "Task" [ 980.269742] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.277556] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5248bbed-380e-15bb-9378-d1ed997d23e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.278536] env[69994]: INFO nova.scheduler.client.report [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted allocations for instance e8caf244-413b-49bb-bdff-79aca0ccbc2b [ 980.421105] env[69994]: DEBUG nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 980.446914] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 980.447260] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.447397] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 980.447538] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.447726] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 980.447873] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 980.448122] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 980.448305] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 980.448478] env[69994]: DEBUG 
nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 980.448661] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 980.448891] env[69994]: DEBUG nova.virt.hardware [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 980.450117] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2391a63-84bd-496f-bd18-9959d26ad04b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.458841] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a6101f-12fb-423c-a00f-d658fd64733c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.633906] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 980.634098] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 980.634381] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647906', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'name': 'volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c14851d2-66c5-4865-ae66-abbe303f0c31', 'attached_at': '', 'detached_at': '', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'serial': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 980.635049] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08c8d82-8f48-4c8b-a209-3c0b6cad8d67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.653302] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49caca69-5454-40db-b8de-a79e92534053 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.659917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa70429f-567a-4895-92ac-518929a405b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.678311] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de6fac-ef09-40ca-974f-b93f94177fb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.695607] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] The volume has not been displaced from its original location: [datastore1] volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41/volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 980.701037] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Reconfiguring VM instance instance-00000045 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 980.702322] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59246b65-c118-4741-bc74-f9e9ca44da89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.715565] env[69994]: DEBUG nova.compute.manager [req-04601c90-914f-41aa-9e7d-44ee093b2111 req-6b107a89-5b07-4e94-8892-9360f6a26bae service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Received event network-vif-plugged-b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.715778] env[69994]: DEBUG oslo_concurrency.lockutils [req-04601c90-914f-41aa-9e7d-44ee093b2111 req-6b107a89-5b07-4e94-8892-9360f6a26bae service nova] Acquiring lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.715984] env[69994]: DEBUG oslo_concurrency.lockutils [req-04601c90-914f-41aa-9e7d-44ee093b2111 req-6b107a89-5b07-4e94-8892-9360f6a26bae service nova] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.716167] env[69994]: DEBUG oslo_concurrency.lockutils [req-04601c90-914f-41aa-9e7d-44ee093b2111 req-6b107a89-5b07-4e94-8892-9360f6a26bae service nova] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.716334] env[69994]: DEBUG nova.compute.manager [req-04601c90-914f-41aa-9e7d-44ee093b2111 req-6b107a89-5b07-4e94-8892-9360f6a26bae service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] No waiting events found dispatching network-vif-plugged-b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 980.716501] env[69994]: WARNING nova.compute.manager [req-04601c90-914f-41aa-9e7d-44ee093b2111 req-6b107a89-5b07-4e94-8892-9360f6a26bae service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Received unexpected event network-vif-plugged-b2521bc7-942e-4d29-bc89-0fd13a02f783 for instance with vm_state building and task_state spawning. [ 980.723418] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 980.723418] env[69994]: value = "task-3242222" [ 980.723418] env[69994]: _type = "Task" [ 980.723418] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.731612] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242222, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.779296] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5248bbed-380e-15bb-9378-d1ed997d23e2, 'name': SearchDatastore_Task, 'duration_secs': 0.008015} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.780103] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eb7d4c3-92c2-4221-a1f9-fd601273eeb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.786357] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a62a7d39-ca72-4ba9-aca8-6a4020b7db8a tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "e8caf244-413b-49bb-bdff-79aca0ccbc2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.852s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.788425] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 980.788425] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529aea9f-c05a-9031-fdf9-a46db253332c" [ 980.788425] env[69994]: _type = "Task" [ 980.788425] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.796438] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529aea9f-c05a-9031-fdf9-a46db253332c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.848809] env[69994]: DEBUG nova.network.neutron [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Successfully updated port: b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.976621] env[69994]: DEBUG nova.network.neutron [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.233129] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242222, 'name': ReconfigVM_Task, 'duration_secs': 0.217433} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.233503] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Reconfigured VM instance instance-00000045 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 981.238953] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c6020b9-c0ee-4b27-ad94-4a393bffec58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.258411] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 981.258411] env[69994]: value = "task-3242223" [ 981.258411] env[69994]: _type = "Task" [ 981.258411] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.269093] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242223, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.299830] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529aea9f-c05a-9031-fdf9-a46db253332c, 'name': SearchDatastore_Task, 'duration_secs': 0.01065} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.299830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.300282] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 850930f9-d5fb-4546-9796-30e164a1cdd3/850930f9-d5fb-4546-9796-30e164a1cdd3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 981.300885] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.301122] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.301622] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b537fed-feaf-44bb-ac1d-a0f13bfd618f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.303971] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-831efabf-f3b5-418b-af64-53291bdb35d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.313373] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 981.313373] env[69994]: value = "task-3242224" [ 981.313373] env[69994]: _type = "Task" [ 981.313373] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.315103] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.315428] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.319435] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c962c002-4a75-4396-b4df-cf5153e8011b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.327864] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 981.327864] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5277889c-05ae-0313-f719-e4377748ee3c" [ 981.327864] env[69994]: _type = "Task" [ 981.327864] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.331979] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242224, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.344552] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5277889c-05ae-0313-f719-e4377748ee3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.351397] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.351590] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.351709] env[69994]: DEBUG nova.network.neutron [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.479797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.529392] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97455a5-7979-4fc9-861b-442c63e5b761 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.538666] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b49c66f-5eed-424c-8946-92d7d89d3ca3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.572794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba0b13b-80d6-49bb-b9ef-aa624ce3fe20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.582369] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fa53bb-642e-45cd-b752-6afc252c33b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.599598] env[69994]: DEBUG nova.compute.provider_tree [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.755315] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.759667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.759667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.759667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.759667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.765024] env[69994]: INFO nova.compute.manager [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 
tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Terminating instance [ 981.773106] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242223, 'name': ReconfigVM_Task, 'duration_secs': 0.114375} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.773427] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647906', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'name': 'volume-f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c14851d2-66c5-4865-ae66-abbe303f0c31', 'attached_at': '', 'detached_at': '', 'volume_id': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41', 'serial': 'f439bb0f-f9f2-4fca-9d5c-9ad196d08d41'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 981.773714] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.775188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43b8678-28f9-4c5c-904a-41c50eb2a8bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.782720] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.782948] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f3aec63-1c05-4649-85f7-f73a19a1d027 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.824335] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242224, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468085} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.824595] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 850930f9-d5fb-4546-9796-30e164a1cdd3/850930f9-d5fb-4546-9796-30e164a1cdd3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.824814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.825071] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a3a0bb4-66a8-4bb1-8aa5-a4d27c032f31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.831716] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 981.831716] env[69994]: value = "task-3242226" [ 981.831716] env[69994]: _type = "Task" [ 981.831716] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.843863] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.848719] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5277889c-05ae-0313-f719-e4377748ee3c, 'name': SearchDatastore_Task, 'duration_secs': 0.018682} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.850517] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 981.850716] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 981.850889] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Deleting the datastore file [datastore1] c14851d2-66c5-4865-ae66-abbe303f0c31 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.851129] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-790e04ad-fdd5-4fe2-b0e0-4a4a66e210f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.853465] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c866bdd-3d3e-4bc5-b3fe-2357ef39575b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.860069] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 981.860069] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cb84b7-5b0d-ce70-ac81-bdb27b0bc232" [ 981.860069] env[69994]: _type = "Task" [ 981.860069] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.863452] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for the task: (returnval){ [ 981.863452] env[69994]: value = "task-3242227" [ 981.863452] env[69994]: _type = "Task" [ 981.863452] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.870201] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cb84b7-5b0d-ce70-ac81-bdb27b0bc232, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.874712] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242227, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.884741] env[69994]: DEBUG nova.network.neutron [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 982.007308] env[69994]: DEBUG nova.network.neutron [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.103948] env[69994]: DEBUG nova.scheduler.client.report [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 982.269142] env[69994]: DEBUG nova.compute.manager [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 982.269428] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.271890] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfad444c-97e8-48b9-91c9-6072f8587da3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.279148] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.279425] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d8b3146-cb0f-4512-a5c4-ea713680eb9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.285182] env[69994]: DEBUG oslo_vmware.api [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 982.285182] env[69994]: value = "task-3242228" [ 982.285182] env[69994]: _type = "Task" [ 982.285182] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.292894] env[69994]: DEBUG oslo_vmware.api [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242228, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.341256] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060905} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.341521] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 982.342324] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ab8b98-b9b6-4e1a-8203-7da593019465 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.364370] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 850930f9-d5fb-4546-9796-30e164a1cdd3/850930f9-d5fb-4546-9796-30e164a1cdd3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.364740] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b703d0e-7bf1-4f95-8218-65ab224a2e4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.390682] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cb84b7-5b0d-ce70-ac81-bdb27b0bc232, 'name': SearchDatastore_Task, 'duration_secs': 0.008681} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.394474] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.394762] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0d42c1c7-2ac1-44f3-8311-929f141e0a65/0d42c1c7-2ac1-44f3-8311-929f141e0a65.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.395044] env[69994]: DEBUG oslo_vmware.api [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Task: {'id': task-3242227, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130785} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.395294] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 982.395294] env[69994]: value = "task-3242229" [ 982.395294] env[69994]: _type = "Task" [ 982.395294] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.395524] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca24527f-95b5-4bed-bf20-f85abd378c5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.397341] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.397529] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.397736] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.408101] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.409602] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 982.409602] env[69994]: value = "task-3242230" [ 982.409602] env[69994]: _type = "Task" [ 982.409602] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.417014] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.458547] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 982.459022] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df6f24e2-3be2-420c-ab56-ee8f41ece19b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.468097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd9acc7-6b13-4c74-9011-fd0506467310 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.485970] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 982.486359] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b22d894e-15c6-4288-ba78-c1e75aa617b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.501779] env[69994]: ERROR nova.compute.manager [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Failed to detach volume f439bb0f-f9f2-4fca-9d5c-9ad196d08d41 from /dev/sda: nova.exception.InstanceNotFound: Instance c14851d2-66c5-4865-ae66-abbe303f0c31 could not be found. [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Traceback (most recent call last): [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self.driver.rebuild(**kwargs) [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] raise NotImplementedError() [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] NotImplementedError [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] During handling of the above exception, another exception occurred: [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Traceback (most recent call last): [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self.driver.detach_volume(context, old_connection_info, [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] return self._volumeops.detach_volume(connection_info, instance) [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self._detach_volume_vmdk(connection_info, instance) [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] stable_ref.fetch_moref(session) [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] raise exception.InstanceNotFound(instance_id=self._uuid) [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] nova.exception.InstanceNotFound: Instance c14851d2-66c5-4865-ae66-abbe303f0c31 could not be found. [ 982.501779] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.505768] env[69994]: DEBUG oslo_vmware.api [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 982.505768] env[69994]: value = "task-3242231" [ 982.505768] env[69994]: _type = "Task" [ 982.505768] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.509328] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.509766] env[69994]: DEBUG nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Instance network_info: |[{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 982.510157] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:f2:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52e117d3-d120-42c6-8e72-70085845acbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2521bc7-942e-4d29-bc89-0fd13a02f783', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.518221] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.522132] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.522794] env[69994]: DEBUG oslo_vmware.api [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242231, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.522794] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd1dd312-6833-4470-b403-18cec6c983ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.544773] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.544773] env[69994]: value = "task-3242232" [ 982.544773] env[69994]: _type = "Task" [ 982.544773] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.554626] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242232, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.609066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.609620] env[69994]: DEBUG nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 982.612316] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.629s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.612546] env[69994]: DEBUG nova.objects.instance [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lazy-loading 'pci_requests' on Instance uuid 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.664601] env[69994]: DEBUG nova.compute.utils [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Build of instance c14851d2-66c5-4865-ae66-abbe303f0c31 aborted: Failed to rebuild volume backed instance. 
{{(pid=69994) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 982.666877] env[69994]: ERROR nova.compute.manager [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance c14851d2-66c5-4865-ae66-abbe303f0c31 aborted: Failed to rebuild volume backed instance. [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Traceback (most recent call last): [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self.driver.rebuild(**kwargs) [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] raise NotImplementedError() [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] NotImplementedError [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] During handling of the above exception, another exception occurred: [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Traceback (most recent call last): [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self._detach_root_volume(context, instance, root_bdm) [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] with excutils.save_and_reraise_exception(): [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self.force_reraise() [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] raise self.value [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] 
self.driver.detach_volume(context, old_connection_info, [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] return self._volumeops.detach_volume(connection_info, instance) [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self._detach_volume_vmdk(connection_info, instance) [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] stable_ref.fetch_moref(session) [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] raise exception.InstanceNotFound(instance_id=self._uuid) [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] nova.exception.InstanceNotFound: Instance c14851d2-66c5-4865-ae66-abbe303f0c31 could not be found. 
[ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] During handling of the above exception, another exception occurred: [ 982.666877] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Traceback (most recent call last): [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] yield [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self._do_rebuild_instance_with_claim( [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self._do_rebuild_instance( [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self._rebuild_default_impl(**kwargs) [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] self._rebuild_volume_backed_instance( [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] raise exception.BuildAbortException( [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] nova.exception.BuildAbortException: Build of instance c14851d2-66c5-4865-ae66-abbe303f0c31 aborted: Failed to rebuild volume backed instance. [ 982.668134] env[69994]: ERROR nova.compute.manager [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] [ 982.756791] env[69994]: DEBUG nova.compute.manager [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Received event network-changed-b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 982.757017] env[69994]: DEBUG nova.compute.manager [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Refreshing instance network info cache due to event network-changed-b2521bc7-942e-4d29-bc89-0fd13a02f783. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 982.757311] env[69994]: DEBUG oslo_concurrency.lockutils [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] Acquiring lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.757538] env[69994]: DEBUG oslo_concurrency.lockutils [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] Acquired lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.757623] env[69994]: DEBUG nova.network.neutron [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Refreshing network info cache for port b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 982.795424] env[69994]: DEBUG oslo_vmware.api [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242228, 'name': PowerOffVM_Task, 'duration_secs': 0.26133} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.795751] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.795919] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.796209] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33037ee3-5011-4324-8f85-6334df61465d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.874450] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.874806] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.875098] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleting the datastore file [datastore2] 87473dd1-458d-4ef4-a1bd-7e653e509ea4 {{(pid=69994) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.875453] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eaee77a0-3761-4698-bd2d-c8869c932ef9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.881793] env[69994]: DEBUG oslo_vmware.api [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for the task: (returnval){ [ 982.881793] env[69994]: value = "task-3242234" [ 982.881793] env[69994]: _type = "Task" [ 982.881793] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.891408] env[69994]: DEBUG oslo_vmware.api [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.912089] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.922600] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242230, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.015636] env[69994]: DEBUG oslo_vmware.api [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242231, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.054255] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242232, 'name': CreateVM_Task, 'duration_secs': 0.49502} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.054498] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.055137] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.055305] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.055626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 983.055868] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5af90fa6-3873-49f9-850b-3b92280a27da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.059992] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 983.059992] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a57555-2b73-3800-c00f-34a1c9a4fca8" [ 983.059992] env[69994]: _type = "Task" [ 983.059992] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.067475] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a57555-2b73-3800-c00f-34a1c9a4fca8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.116014] env[69994]: DEBUG nova.compute.utils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 983.118683] env[69994]: DEBUG nova.objects.instance [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lazy-loading 'numa_topology' on Instance uuid 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.119839] env[69994]: DEBUG nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 983.392183] env[69994]: DEBUG oslo_vmware.api [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Task: {'id': task-3242234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.492194} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.392448] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 983.392700] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 983.392827] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 983.392948] env[69994]: INFO nova.compute.manager [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 983.393215] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 983.393402] env[69994]: DEBUG nova.compute.manager [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 983.393493] env[69994]: DEBUG nova.network.neutron [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 983.409160] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242229, 'name': ReconfigVM_Task, 'duration_secs': 0.638685} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.409432] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 850930f9-d5fb-4546-9796-30e164a1cdd3/850930f9-d5fb-4546-9796-30e164a1cdd3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.410072] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b91966fa-a8b4-4350-87d5-8b3cd4b56071 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.415884] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 983.415884] env[69994]: value = "task-3242235" [ 983.415884] env[69994]: _type = "Task" [ 983.415884] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.421880] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552657} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.422151] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0d42c1c7-2ac1-44f3-8311-929f141e0a65/0d42c1c7-2ac1-44f3-8311-929f141e0a65.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.422376] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.422976] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-403e318a-72bd-4cdf-9add-809c696a70b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.427803] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242235, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.435383] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 983.435383] env[69994]: value = "task-3242236" [ 983.435383] env[69994]: _type = "Task" [ 983.435383] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.444879] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242236, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.518289] env[69994]: DEBUG oslo_vmware.api [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242231, 'name': PowerOnVM_Task, 'duration_secs': 0.570198} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.518559] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 983.518777] env[69994]: DEBUG nova.compute.manager [None req-95165717-7844-41d4-85f4-4d66e5ee5ecd tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 983.519554] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e718a25-910f-4adb-a48d-c701ccb0ca34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.522690] env[69994]: DEBUG nova.network.neutron [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updated VIF entry in instance network info cache for port b2521bc7-942e-4d29-bc89-0fd13a02f783. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 983.523180] env[69994]: DEBUG nova.network.neutron [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.571024] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a57555-2b73-3800-c00f-34a1c9a4fca8, 'name': SearchDatastore_Task, 'duration_secs': 0.063812} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.571218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.571304] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.571538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.571677] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.571843] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.572420] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a58e436-e0e4-48ab-ad5d-e4e399229c53 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.582264] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.582437] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.583167] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d059cfd-ca87-48c3-9877-a246c46f28e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.588678] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 983.588678] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52172afc-c56e-29d2-0424-096d07639c97" [ 983.588678] env[69994]: _type = "Task" [ 983.588678] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.598640] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52172afc-c56e-29d2-0424-096d07639c97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.621314] env[69994]: DEBUG nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 983.626210] env[69994]: INFO nova.compute.claims [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.925445] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242235, 'name': Rename_Task, 'duration_secs': 0.285067} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.925718] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.925959] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55c92f3b-3f92-4f40-b6b2-bba1847e79f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.932364] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 983.932364] env[69994]: value = "task-3242237" [ 983.932364] env[69994]: _type = "Task" [ 983.932364] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.941877] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242237, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.946403] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069437} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.946639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.947397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4ef169-23f1-423e-bc72-f5fd7cb80a38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.974840] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 0d42c1c7-2ac1-44f3-8311-929f141e0a65/0d42c1c7-2ac1-44f3-8311-929f141e0a65.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.975360] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8301d59f-53bf-46ce-8194-290bc4814ce0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.994933] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 983.994933] env[69994]: value = "task-3242238" [ 983.994933] env[69994]: _type = "Task" [ 983.994933] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.001964] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242238, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.025976] env[69994]: DEBUG oslo_concurrency.lockutils [req-dca0c050-8126-4486-b3be-50bdf3cc9643 req-8feb7af1-db3d-4a9f-8848-2caef731a83d service nova] Releasing lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.098394] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52172afc-c56e-29d2-0424-096d07639c97, 'name': SearchDatastore_Task, 'duration_secs': 0.035154} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.099182] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6624e070-ba5f-467d-bb74-066bba96b75f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.103952] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 984.103952] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52733bf7-0d79-918f-af10-52bb79b0a20b" [ 984.103952] env[69994]: _type = "Task" [ 984.103952] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.111364] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52733bf7-0d79-918f-af10-52bb79b0a20b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.225671] env[69994]: DEBUG nova.network.neutron [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.442809] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242237, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.504505] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242238, 'name': ReconfigVM_Task, 'duration_secs': 0.328978} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.504505] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 0d42c1c7-2ac1-44f3-8311-929f141e0a65/0d42c1c7-2ac1-44f3-8311-929f141e0a65.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.505212] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acdfeda8-1843-4736-ba84-291ed1c23881 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.510933] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 984.510933] env[69994]: value = "task-3242239" [ 984.510933] env[69994]: _type = "Task" [ 984.510933] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.518900] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242239, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.613638] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52733bf7-0d79-918f-af10-52bb79b0a20b, 'name': SearchDatastore_Task, 'duration_secs': 0.015452} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.613894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.614180] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.614438] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e2d62e0-963a-4b28-90d8-97be1201451e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.621191] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 984.621191] env[69994]: value = "task-3242240" [ 984.621191] env[69994]: _type = "Task" [ 984.621191] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.631033] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.634757] env[69994]: DEBUG nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 984.666223] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 984.667029] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.667029] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 984.667158] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.667282] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 984.667395] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 984.667606] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 984.667795] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 984.667969] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 
tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 984.668146] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 984.668321] env[69994]: DEBUG nova.virt.hardware [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 984.669239] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e85abfd-9306-4dd8-be75-039a73161509 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.678234] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2686cc90-58b0-4e3e-a1f2-ca1eba6b0905 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.683213] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.696853] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.703182] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Creating folder: Project (db63ff3bfbd343f2b24a3814e636c5bc). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.706647] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74e8bad9-bf57-4fbf-9e6b-aaadaf1ec48d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.718725] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Created folder: Project (db63ff3bfbd343f2b24a3814e636c5bc) in parent group-v647729. [ 984.718936] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Creating folder: Instances. Parent ref: group-v647961. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.721675] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dca87aac-a917-4e00-b954-b5b99c401b95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.728183] env[69994]: INFO nova.compute.manager [-] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Took 1.33 seconds to deallocate network for instance. [ 984.736544] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Created folder: Instances in parent group-v647961. [ 984.737336] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 984.737336] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 984.737556] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b7c7e45-c96c-4de5-ad35-f50555a4991d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.759029] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.759029] env[69994]: value = "task-3242243" [ 984.759029] env[69994]: _type = "Task" [ 984.759029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.907075] env[69994]: DEBUG nova.compute.manager [req-9a72ed19-7433-434d-a42a-1fe293143162 req-eff12967-0ef3-4f18-973b-b2a0460b4890 service nova] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Received event network-vif-deleted-b7e8be98-685a-4d07-9440-e07af619b026 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 984.917096] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cbab1b-dd06-4964-971b-0a2dc7c319ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.926030] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6d885d-3479-4923-8e4a-dc0dec082ca2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.965360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3da9305-f55f-43f0-ae87-7482e52ee767 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.974866] env[69994]: DEBUG oslo_vmware.api [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242237, 'name': PowerOnVM_Task, 'duration_secs': 0.812745} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.978230] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.978448] env[69994]: INFO nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Took 8.98 seconds to spawn the instance on the hypervisor. [ 984.978540] env[69994]: DEBUG nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 984.979424] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558b2a8d-17c5-4b33-9179-10b2ced50d2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.983122] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de575cdd-ecab-4d87-857a-0e542045dc72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.003049] env[69994]: DEBUG nova.compute.provider_tree [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.023316] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242239, 'name': Rename_Task, 'duration_secs': 0.152957} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.023606] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.023880] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd20c3d2-4496-4245-93fe-0a18ca589327 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.030397] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 985.030397] env[69994]: value = "task-3242244" [ 985.030397] env[69994]: _type = "Task" [ 985.030397] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.039789] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242244, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.134721] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481897} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.135019] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.135518] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.135797] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c394167-eeaa-4997-bebe-67af313eda99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.142220] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 985.142220] env[69994]: value = "task-3242245" [ 985.142220] env[69994]: _type = "Task" [ 985.142220] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.150619] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242245, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.238962] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.268676] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242243, 'name': CreateVM_Task, 'duration_secs': 0.332416} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.268856] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 985.269295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.269457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.269817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 985.270083] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71342de8-5a14-47fd-b6e0-a0133c82a9e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.274836] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 985.274836] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5246c054-438c-8a63-dfa1-2910eef5d60e" [ 985.274836] env[69994]: _type = "Task" [ 985.274836] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.282217] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5246c054-438c-8a63-dfa1-2910eef5d60e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.512039] env[69994]: DEBUG nova.scheduler.client.report [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 985.515686] env[69994]: INFO nova.compute.manager [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Took 28.13 seconds to build instance. [ 985.544620] env[69994]: DEBUG oslo_vmware.api [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242244, 'name': PowerOnVM_Task, 'duration_secs': 0.481916} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.545025] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.545325] env[69994]: INFO nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Took 16.52 seconds to spawn the instance on the hypervisor. 
[ 985.545591] env[69994]: DEBUG nova.compute.manager [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.546799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aadcd25-f2cf-4c80-ae44-42dc8c6dd3cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.630960] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquiring lock "c14851d2-66c5-4865-ae66-abbe303f0c31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.631259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.631478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquiring lock "c14851d2-66c5-4865-ae66-abbe303f0c31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.631662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.631833] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.633966] env[69994]: INFO nova.compute.manager [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Terminating instance [ 985.652586] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242245, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067242} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.652586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.653382] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe79792e-cd78-4da3-a79b-480748d9a636 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.676188] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.677020] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73d0d80c-8c60-4ee5-a783-789e308f23da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.696886] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 985.696886] env[69994]: value = "task-3242246" [ 985.696886] env[69994]: _type = "Task" [ 985.696886] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.705618] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242246, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.785085] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5246c054-438c-8a63-dfa1-2910eef5d60e, 'name': SearchDatastore_Task, 'duration_secs': 0.00975} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.785391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.785627] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.785862] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.786016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.786206] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.786460] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d579636-48ea-4542-978a-ab69cc07dd3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.795487] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.795657] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 985.796342] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-121c6f68-4789-41ec-8d3f-334e5ce946e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.801425] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 985.801425] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529dd575-89db-1598-3775-0b7447c43449" [ 985.801425] env[69994]: _type = "Task" [ 985.801425] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.808335] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529dd575-89db-1598-3775-0b7447c43449, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.016959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.405s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.019234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.070s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.019426] env[69994]: DEBUG nova.objects.instance [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 986.022133] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a49e636d-862e-4a76-954e-067fb513882c tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.646s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.048482] env[69994]: INFO nova.network.neutron [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating port e4706905-12e3-43b1-a83a-409585a96042 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 986.066645] env[69994]: INFO nova.compute.manager [None 
req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Took 29.77 seconds to build instance. [ 986.141021] env[69994]: DEBUG nova.compute.manager [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 986.141021] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db80db90-1803-4b14-84e4-53a203cdb530 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.151934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d41389ee-ebeb-418d-ad3c-3ed2008df1e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.184844] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance c14851d2-66c5-4865-ae66-abbe303f0c31 could not be found. [ 986.185074] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 986.185497] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a60d184-64da-4df1-8d07-d18b0e8abde2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.193318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187f7b4d-2d0a-407a-ae87-4f1bfd8a3ce2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.213671] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242246, 'name': ReconfigVM_Task, 'duration_secs': 0.282512} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.213945] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfigured VM instance instance-0000004f to attach disk [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 986.214567] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d23ce71e-4f9e-49ca-ab6b-e70d125e9024 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.227068] env[69994]: WARNING nova.virt.vmwareapi.vmops [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c14851d2-66c5-4865-ae66-abbe303f0c31 could not be found. [ 986.227265] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.227445] env[69994]: INFO nova.compute.manager [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Took 0.09 seconds to destroy the instance on the hypervisor. [ 986.227708] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.228294] env[69994]: DEBUG nova.compute.manager [-] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 986.228385] env[69994]: DEBUG nova.network.neutron [-] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 986.232101] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 986.232101] env[69994]: value = "task-3242247" [ 986.232101] env[69994]: _type = "Task" [ 986.232101] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.239452] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242247, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.312096] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529dd575-89db-1598-3775-0b7447c43449, 'name': SearchDatastore_Task, 'duration_secs': 0.01959} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.312863] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92ff5d40-9204-42e6-b1e5-e0a255a9523b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.318364] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 986.318364] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e3b0c5-db7b-2261-4125-19b44514e99f" [ 986.318364] env[69994]: _type = "Task" [ 986.318364] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.328279] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e3b0c5-db7b-2261-4125-19b44514e99f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.568786] env[69994]: DEBUG oslo_concurrency.lockutils [None req-81152da8-d64a-4091-ac70-5c0ba9c0446e tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.284s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.751815] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242247, 'name': Rename_Task, 'duration_secs': 0.134325} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.753151] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.753476] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-accbb673-370f-4a9c-ab7e-9f5ad2c32adf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.764580] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 986.764580] env[69994]: value = "task-3242248" [ 986.764580] env[69994]: _type = "Task" [ 986.764580] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.782276] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242248, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.830211] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e3b0c5-db7b-2261-4125-19b44514e99f, 'name': SearchDatastore_Task, 'duration_secs': 0.039734} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.830500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.830874] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.832506] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fd77c24-483e-41c7-84dc-711186a25a76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.840889] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 986.840889] env[69994]: value = "task-3242249" [ 986.840889] env[69994]: _type = "Task" [ 986.840889] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.851826] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.965169] env[69994]: DEBUG nova.compute.manager [req-4dd0bb87-7f2f-465c-a2f2-4943eb6fc051 req-061eb567-26d0-46c9-9b2f-bd83f9d18ccb service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Received event network-vif-deleted-58a0ef78-0177-4996-ba8f-adbf83a9c0e6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 986.965169] env[69994]: INFO nova.compute.manager [req-4dd0bb87-7f2f-465c-a2f2-4943eb6fc051 req-061eb567-26d0-46c9-9b2f-bd83f9d18ccb service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Neutron deleted interface 58a0ef78-0177-4996-ba8f-adbf83a9c0e6; detaching it from the instance and deleting it from the info cache [ 986.965590] env[69994]: DEBUG nova.network.neutron [req-4dd0bb87-7f2f-465c-a2f2-4943eb6fc051 req-061eb567-26d0-46c9-9b2f-bd83f9d18ccb service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.985821] env[69994]: DEBUG nova.compute.manager [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Received event network-changed-d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 986.986098] env[69994]: DEBUG nova.compute.manager [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Refreshing instance network info cache due to event network-changed-d4a8692e-1b97-42dd-a02a-53c07d85ad0a. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 986.986401] env[69994]: DEBUG oslo_concurrency.lockutils [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] Acquiring lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.991418] env[69994]: DEBUG oslo_concurrency.lockutils [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] Acquired lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.991418] env[69994]: DEBUG nova.network.neutron [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Refreshing network info cache for port d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.030099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b60044a-f85c-4f5d-b04e-73bbe0a713ea tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.034618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.503s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.038133] env[69994]: DEBUG nova.objects.instance [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 987.285517] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242248, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.325274] env[69994]: DEBUG nova.network.neutron [-] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.359431] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242249, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.471823] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47ba7b34-08da-4b38-a030-d6a45bd40027 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.484794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e667e092-583f-424d-80f2-dfb20c8e58c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.505421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.505421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.505421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.505421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.505421] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.508617] env[69994]: INFO nova.compute.manager [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Terminating instance [ 987.533255] env[69994]: DEBUG nova.compute.manager [req-4dd0bb87-7f2f-465c-a2f2-4943eb6fc051 req-061eb567-26d0-46c9-9b2f-bd83f9d18ccb service nova] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Detach interface failed, port_id=58a0ef78-0177-4996-ba8f-adbf83a9c0e6, reason: Instance c14851d2-66c5-4865-ae66-abbe303f0c31 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 987.789467] env[69994]: DEBUG oslo_vmware.api [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242248, 'name': PowerOnVM_Task, 'duration_secs': 0.732383} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.796733] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 987.797284] env[69994]: INFO nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Took 7.38 seconds to spawn the instance on the hypervisor. [ 987.797568] env[69994]: DEBUG nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 987.799462] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897989ba-7847-4454-937a-7c49e9e0fb28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.831049] env[69994]: INFO nova.compute.manager [-] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Took 1.60 seconds to deallocate network for instance. [ 987.854269] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541191} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.854582] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 987.854797] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 987.855092] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b942e47-105c-49db-8168-c0b86235ac5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.863038] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 987.863038] env[69994]: value = "task-3242250" [ 987.863038] env[69994]: _type = "Task" [ 987.863038] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.874958] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242250, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.014626] env[69994]: DEBUG nova.compute.manager [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 988.014918] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.015903] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4c2c4c-3d2b-4036-b3f7-c116af00a870 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.026806] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.027202] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e253b11c-a401-4894-b1a6-3b73327e9353 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.035537] env[69994]: DEBUG oslo_vmware.api [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 988.035537] env[69994]: value = "task-3242251" [ 988.035537] env[69994]: _type = "Task" [ 988.035537] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.052018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7aba06a5-74ff-49c6-a762-ea6356167d28 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.054060] env[69994]: DEBUG oslo_vmware.api [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242251, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.054729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.508s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.057279] env[69994]: INFO nova.compute.claims [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.093467] env[69994]: DEBUG nova.network.neutron [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Updated VIF entry in instance network info cache for port d4a8692e-1b97-42dd-a02a-53c07d85ad0a. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.095461] env[69994]: DEBUG nova.network.neutron [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Updating instance_info_cache with network_info: [{"id": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "address": "fa:16:3e:40:0c:90", "network": {"id": "740b7ac0-1366-44a5-8ce6-82ec9b338dce", "bridge": "br-int", "label": "tempest-ServersTestJSON-1662708788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55223bc0a464f1fa4d3b200926fd64f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "352165bb-004f-4180-9627-3a275dbe18af", "external-id": "nsx-vlan-transportzone-926", "segmentation_id": 926, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4a8692e-1b", "ovs_interfaceid": "d4a8692e-1b97-42dd-a02a-53c07d85ad0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.114808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.115106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.115379] env[69994]: DEBUG nova.network.neutron [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.325918] env[69994]: INFO nova.compute.manager [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Took 30.73 seconds to build instance. [ 988.374023] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242250, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.230804} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.374388] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.375206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c20650-a4ad-4158-9e67-cb32fb744e6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.395809] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.397392] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32226650-b562-470f-82b1-d94aabffc83d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.411746] env[69994]: INFO nova.compute.manager [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Took 0.58 seconds to detach 1 volumes for instance. [ 988.414372] env[69994]: DEBUG nova.compute.manager [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Deleting volume: f439bb0f-f9f2-4fca-9d5c-9ad196d08d41 {{(pid=69994) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 988.418577] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 988.418577] env[69994]: value = "task-3242252" [ 988.418577] env[69994]: _type = "Task" [ 988.418577] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.428152] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242252, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.549318] env[69994]: DEBUG oslo_vmware.api [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242251, 'name': PowerOffVM_Task, 'duration_secs': 0.497659} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.549906] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.550105] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.550394] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ad8f5a5-8bad-4884-b7fe-938cd3047d02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.599296] env[69994]: DEBUG oslo_concurrency.lockutils [req-de93038d-347a-4225-a9f0-5f76c5fdf0ae req-21519e71-8f76-4f9d-b7c6-1c5a5503815c service nova] Releasing lock "refresh_cache-850930f9-d5fb-4546-9796-30e164a1cdd3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.759927] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.760196] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.760380] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Deleting the datastore file [datastore1] 0d42c1c7-2ac1-44f3-8311-929f141e0a65 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.760638] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-058d4859-a6bd-47eb-9334-b6c7257311ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.769428] env[69994]: DEBUG oslo_vmware.api [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 988.769428] env[69994]: value = "task-3242255" [ 988.769428] env[69994]: _type = "Task" [ 988.769428] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.782671] env[69994]: DEBUG oslo_vmware.api [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242255, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.829370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9711b9ad-370f-4f75-8a60-328e14b3de79 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.246s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.928361] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242252, 'name': ReconfigVM_Task, 'duration_secs': 0.472833} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.928566] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.929310] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e38c484a-3a7a-4d40-8346-3e0488c61846 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.935884] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 988.935884] env[69994]: value = "task-3242256" [ 988.935884] env[69994]: _type = "Task" [ 988.935884] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.945691] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242256, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.952211] env[69994]: DEBUG nova.network.neutron [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4706905-12", "ovs_interfaceid": "e4706905-12e3-43b1-a83a-409585a96042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.975544] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.998520] env[69994]: DEBUG nova.compute.manager [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received event network-vif-plugged-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.998520] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.998520] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.998520] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.998520] env[69994]: DEBUG nova.compute.manager [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] No waiting events found dispatching network-vif-plugged-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 988.998520] env[69994]: WARNING nova.compute.manager [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received unexpected event network-vif-plugged-e4706905-12e3-43b1-a83a-409585a96042 for instance with vm_state shelved_offloaded and task_state spawning. [ 988.998520] env[69994]: DEBUG nova.compute.manager [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received event network-changed-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 989.000569] env[69994]: DEBUG nova.compute.manager [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Refreshing instance network info cache due to event network-changed-e4706905-12e3-43b1-a83a-409585a96042. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 989.000914] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] Acquiring lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.280413] env[69994]: DEBUG oslo_vmware.api [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.257382} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.280684] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.280891] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.281085] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.281258] env[69994]: INFO nova.compute.manager [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Took 1.27 seconds to destroy the instance on the hypervisor. [ 989.281498] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.281678] env[69994]: DEBUG nova.compute.manager [-] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.281773] env[69994]: DEBUG nova.network.neutron [-] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.314637] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fc77fb-4c01-4cbe-84da-2335a755913a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.322330] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846d25db-5af3-4f14-891e-2f02e784f4a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.354552] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a56553-b0ac-452d-981f-69444bef8a19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.365685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87707f0b-bd31-4e90-9fca-ad9ff10d0d45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.386091] env[69994]: DEBUG nova.compute.provider_tree [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 
tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.446009] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242256, 'name': Rename_Task, 'duration_secs': 0.126168} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.446312] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.446551] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a964b18c-8d9c-46e3-afbe-5a8bae332e39 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.453606] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 989.453606] env[69994]: value = "task-3242257" [ 989.453606] env[69994]: _type = "Task" [ 989.453606] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.456987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.459892] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] Acquired lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.460890] env[69994]: DEBUG nova.network.neutron [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Refreshing network info cache for port e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.470150] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242257, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.499239] env[69994]: DEBUG nova.compute.manager [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Received event network-changed-b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 989.500179] env[69994]: DEBUG nova.compute.manager [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Refreshing instance network info cache due to event network-changed-b2521bc7-942e-4d29-bc89-0fd13a02f783. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 989.500333] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] Acquiring lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.500482] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] Acquired lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.500641] env[69994]: DEBUG nova.network.neutron [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Refreshing network info cache for port b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.505740] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='0189cec9823d9f0659aeb72a3fda44d9',container_format='bare',created_at=2025-04-03T08:44:35Z,direct_url=,disk_format='vmdk',id=18590b17-addb-4605-8ce4-cb732b6f48da,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1142186707-shelved',owner='f035f8fbac46483fb4d70f166df319b6',properties=ImageMetaProps,protected=,size=31590912,status='active',tags=,updated_at=2025-04-03T08:44:49Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 989.505969] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.506143] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 989.506331] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.506477] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 989.506633] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 989.506821] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 989.507034] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 989.507155] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 989.507335] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 989.507532] env[69994]: DEBUG nova.virt.hardware [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 989.509021] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055a236f-6895-4d8a-a765-773050fde6d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.520189] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7403846d-bb68-4f70-9432-97566218497d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.537405] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] 
[instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:f6:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4706905-12e3-43b1-a83a-409585a96042', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.545436] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.545436] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.545619] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f2be980-1b71-48e7-bc77-59c9abe56efd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.564215] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.564215] env[69994]: value = "task-3242258" [ 989.564215] env[69994]: _type = "Task" [ 989.564215] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.571879] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242258, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.889292] env[69994]: DEBUG nova.scheduler.client.report [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.969156] env[69994]: DEBUG oslo_vmware.api [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242257, 'name': PowerOnVM_Task, 'duration_secs': 0.483873} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.969156] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.969358] env[69994]: INFO nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Took 5.33 seconds to spawn the instance on the hypervisor. [ 989.969540] env[69994]: DEBUG nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.970709] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df46729d-aac7-47e9-90c4-171283055595 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.080146] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242258, 'name': CreateVM_Task, 'duration_secs': 0.362242} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.080383] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.081140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.081337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.082218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.082218] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbac17da-3a82-43e1-b20f-4e543bccb5c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.088954] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: 
(returnval){ [ 990.088954] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52767038-44fe-a7de-3b77-68d6e95d0dfc" [ 990.088954] env[69994]: _type = "Task" [ 990.088954] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.101187] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52767038-44fe-a7de-3b77-68d6e95d0dfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.278276] env[69994]: DEBUG nova.network.neutron [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updated VIF entry in instance network info cache for port e4706905-12e3-43b1-a83a-409585a96042. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.278708] env[69994]: DEBUG nova.network.neutron [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4706905-12", "ovs_interfaceid": "e4706905-12e3-43b1-a83a-409585a96042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.286589] env[69994]: DEBUG nova.network.neutron [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updated VIF entry in instance network info cache for port b2521bc7-942e-4d29-bc89-0fd13a02f783. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.286974] env[69994]: DEBUG nova.network.neutron [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.398158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.398800] env[69994]: DEBUG nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 990.401386] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.775s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.402863] env[69994]: INFO nova.compute.claims [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.491974] env[69994]: INFO nova.compute.manager [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Took 19.73 seconds to build instance. 
[ 990.557031] env[69994]: DEBUG nova.network.neutron [-] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.600977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.601287] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Processing image 18590b17-addb-4605-8ce4-cb732b6f48da {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.601794] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da/18590b17-addb-4605-8ce4-cb732b6f48da.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.602044] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da/18590b17-addb-4605-8ce4-cb732b6f48da.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.602250] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.602498] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b6861bd-9327-4e3a-ba86-44a3d73f294e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.613559] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.613735] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.614524] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c161b38d-fec7-4818-a18e-ccbdc34b481a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.619966] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 990.619966] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d5ec38-7057-7a47-c97b-8d973663f6e1" [ 990.619966] env[69994]: _type = "Task" [ 990.619966] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.627487] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d5ec38-7057-7a47-c97b-8d973663f6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.781531] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b3c9133-61e2-4e3a-81cd-53438295eb2f req-0e3147af-4918-46c1-9e8a-3ea508c34628 service nova] Releasing lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.790377] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a811ec5-3315-4992-ba9a-b84e225881a6 req-1ddb202a-ca8f-46b0-bfa4-03e9a29668a2 service nova] Releasing lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.907548] env[69994]: DEBUG nova.compute.utils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 990.910841] env[69994]: DEBUG nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 990.911026] env[69994]: DEBUG nova.network.neutron [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.985553] env[69994]: DEBUG nova.policy [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '761ebe718b0f48939612e82c6b1e6766', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4c158f7555d4606b641be4264d95eaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 990.994174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3c71cb9b-f217-4ead-b7b6-e34eaa6c2985 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.247s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.050515] env[69994]: DEBUG nova.compute.manager [req-ac54cfb3-883d-4ebf-9d08-da239827255c req-78af1008-1ecd-43fb-b731-2d0f4cc55cd9 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-vif-deleted-aa9589d7-176c-4249-9a3a-0af202829e70 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.050707] env[69994]: DEBUG nova.compute.manager [req-ac54cfb3-883d-4ebf-9d08-da239827255c req-78af1008-1ecd-43fb-b731-2d0f4cc55cd9 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-vif-deleted-f249cccf-9606-4021-91bd-19028a6c4cbf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.051008] env[69994]: DEBUG nova.compute.manager [req-ac54cfb3-883d-4ebf-9d08-da239827255c req-78af1008-1ecd-43fb-b731-2d0f4cc55cd9 service nova] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Received event network-vif-deleted-c14a1492-8af8-4c93-bf0d-f2424cd1f335 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.060673] env[69994]: INFO nova.compute.manager [-] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Took 1.78 seconds to deallocate network for instance. 
[ 991.130681] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 991.130950] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Fetch image to [datastore2] OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23/OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 991.131153] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Downloading stream optimized image 18590b17-addb-4605-8ce4-cb732b6f48da to [datastore2] OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23/OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23.vmdk on the data store datastore2 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 991.131324] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Downloading image file data 18590b17-addb-4605-8ce4-cb732b6f48da to the ESX as VM named 'OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 991.228847] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 991.228847] env[69994]: value = "resgroup-9" [ 991.228847] env[69994]: _type = "ResourcePool" [ 991.228847] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 991.229178] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-776444c9-e764-44b1-92b6-e87c33f8fcea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.257815] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lease: (returnval){ [ 991.257815] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522db3c1-e860-e79c-6688-034d87299e72" [ 991.257815] env[69994]: _type = "HttpNfcLease" [ 991.257815] env[69994]: } obtained for vApp import into resource pool (val){ [ 991.257815] env[69994]: value = "resgroup-9" [ 991.257815] env[69994]: _type = "ResourcePool" [ 991.257815] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 991.258157] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the lease: (returnval){ [ 991.258157] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522db3c1-e860-e79c-6688-034d87299e72" [ 991.258157] env[69994]: _type = "HttpNfcLease" [ 991.258157] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 991.264917] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 991.264917] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522db3c1-e860-e79c-6688-034d87299e72" [ 991.264917] env[69994]: _type = "HttpNfcLease" [ 991.264917] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 991.434372] env[69994]: DEBUG nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 991.567273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.588701] env[69994]: DEBUG nova.network.neutron [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Successfully created port: cbe6409a-2178-4561-aae1-8f9071a0c976 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.696785] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529024ec-0b9c-4a96-b8e9-d8d478c4fa45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.704317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1784b2f1-c9d8-418d-a261-18b3e00e5ad5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.736013] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67170e7c-3a07-4909-8f3c-1117bdb493a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.743464] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69386adf-ceb4-48c6-83ac-7b1c45148629 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.756536] env[69994]: DEBUG nova.compute.provider_tree [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 
92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.765009] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.765213] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 991.765213] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522db3c1-e860-e79c-6688-034d87299e72" [ 991.765213] env[69994]: _type = "HttpNfcLease" [ 991.765213] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 991.765931] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.013305] env[69994]: INFO nova.compute.manager [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Rebuilding instance [ 992.058290] env[69994]: DEBUG nova.compute.manager [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.059487] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8b07ed-e23f-40cd-9591-bfc4793f149e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.259330] env[69994]: DEBUG nova.scheduler.client.report [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.274117] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.274117] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.274117] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.274117] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.274117] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.274117] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.274117] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 992.276893] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.277048] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 992.277048] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522db3c1-e860-e79c-6688-034d87299e72" [ 992.277048] env[69994]: _type = "HttpNfcLease" [ 992.277048] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 992.277536] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 992.277536] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522db3c1-e860-e79c-6688-034d87299e72" [ 992.277536] env[69994]: _type = "HttpNfcLease" [ 992.277536] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 992.278296] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60735ce3-b992-4304-9ef2-3e2e050a2a5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.287123] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283733d-34d4-f6b4-52f1-e553e7193ddd/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 992.287123] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating HTTP connection to write to file with size = 31590912 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283733d-34d4-f6b4-52f1-e553e7193ddd/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 992.357656] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-00e52a1d-77ca-4d48-95de-f5d8e334725f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.428896] env[69994]: DEBUG nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 992.453866] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 992.454133] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.454379] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 992.454478] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.454609] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 992.454774] env[69994]: DEBUG nova.virt.hardware [None 
req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 992.454993] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 992.455182] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 992.455355] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 992.455518] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 992.455692] env[69994]: DEBUG nova.virt.hardware [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 992.456587] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e9bc65-9adb-4c9e-a121-90596306cb73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.464976] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82061342-010d-4b40-ac3e-5aa3dfc77c8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.772032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.772032] env[69994]: DEBUG nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Start building networks asynchronously for instance. 
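The nova.virt.hardware entries above narrow the search from limits of 65536:65536:65536 down to a single possible topology of 1:1:1 for the 1-vCPU m1.nano flavor. A simplified sketch of that enumeration step (not Nova's exact _get_possible_cpu_topologies, which also honours preferred topologies and image constraints): keep only (sockets, cores, threads) triples whose product equals the vCPU count and which fit under the per-dimension maxima.

```python
# Sketch of the idea behind the "possible topologies" lines above:
# enumerate (sockets, cores, threads) whose product equals the vCPU
# count and which fit under the maxima. Simplified and illustrative.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topos.append(VirtCPUTopology(sockets, cores, threads))
    return topos


print(possible_topologies(1, 65536, 65536, 65536))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```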
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 992.782022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.293s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.782022] env[69994]: DEBUG nova.objects.instance [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lazy-loading 'resources' on Instance uuid 4b3addd0-22b0-4793-af75-dba381c4a83f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.782344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.077022] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.077022] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73e48e7e-228e-4d48-beea-db8f4478ba0b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.085669] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 993.085669] env[69994]: value = "task-3242260" [ 993.085669] env[69994]: _type = "Task" [ 993.085669] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.096765] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242260, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.284749] env[69994]: DEBUG nova.compute.utils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 993.299306] env[69994]: DEBUG nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Allocating IP information in the background. 
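The PowerOffVM_Task entries show the recurring pattern in this trace: submit a vCenter task, then wait on it while polling its progress. A rough sketch of such a poll loop; get_task_info here is a hypothetical callable, not the real oslo.vmware API.

```python
# Sketch of the wait_for_task/_poll_task loop visible in the log: poll a
# task until it reaches a terminal state, reporting progress along the way.
import time


def wait_for_task(task_id, get_task_info, interval=0.5):
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task {task_id} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
```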
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 993.299849] env[69994]: DEBUG nova.network.neutron [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 993.307534] env[69994]: DEBUG nova.compute.manager [req-80d1baf2-7a9b-4294-ada6-42533f91481d req-1e84c3d5-1c87-47d7-a882-6e0693d6bcc3 service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Received event network-vif-plugged-cbe6409a-2178-4561-aae1-8f9071a0c976 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.308064] env[69994]: DEBUG oslo_concurrency.lockutils [req-80d1baf2-7a9b-4294-ada6-42533f91481d req-1e84c3d5-1c87-47d7-a882-6e0693d6bcc3 service nova] Acquiring lock "290e8749-6860-4303-b966-65d2efee5499-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.308378] env[69994]: DEBUG oslo_concurrency.lockutils [req-80d1baf2-7a9b-4294-ada6-42533f91481d req-1e84c3d5-1c87-47d7-a882-6e0693d6bcc3 service nova] Lock "290e8749-6860-4303-b966-65d2efee5499-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.308719] env[69994]: DEBUG oslo_concurrency.lockutils [req-80d1baf2-7a9b-4294-ada6-42533f91481d req-1e84c3d5-1c87-47d7-a882-6e0693d6bcc3 service nova] Lock "290e8749-6860-4303-b966-65d2efee5499-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.308948] env[69994]: DEBUG nova.compute.manager [req-80d1baf2-7a9b-4294-ada6-42533f91481d req-1e84c3d5-1c87-47d7-a882-6e0693d6bcc3 service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] No waiting events found dispatching network-vif-plugged-cbe6409a-2178-4561-aae1-8f9071a0c976 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 993.309184] env[69994]: WARNING nova.compute.manager [req-80d1baf2-7a9b-4294-ada6-42533f91481d req-1e84c3d5-1c87-47d7-a882-6e0693d6bcc3 service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Received unexpected event network-vif-plugged-cbe6409a-2178-4561-aae1-8f9071a0c976 for instance with vm_state building and task_state spawning. 
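The network-vif-plugged handling above takes the per-instance "-events" lock, finds no waiting events, and logs the event as unexpected because the instance is still building. A condensed sketch of that waiter/pop pattern (hypothetical names, not Nova's InstanceEvents implementation):

```python
# Sketch of the waiting-events pattern behind the pop_instance_event lines:
# spawning code registers the events it expects; the external event handler
# pops the matching entry and signals it, and warns when no waiter exists.
import threading
from collections import defaultdict

_waiters = defaultdict(dict)          # instance_uuid -> {event_name: Event}
_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    with _lock:
        _waiters[instance_uuid][event_name] = ev
    return ev


def pop_instance_event(instance_uuid, event_name):
    with _lock:
        ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:
        print(f"WARNING: received unexpected event {event_name} "
              f"for instance {instance_uuid}")
        return
    ev.set()
```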
[ 993.372636] env[69994]: DEBUG nova.policy [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b512f0a1ffba457b977e472009f59eed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '377f65074c2442588aee091b5165e1cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 993.490503] env[69994]: DEBUG nova.network.neutron [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Successfully updated port: cbe6409a-2178-4561-aae1-8f9071a0c976 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.556202] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 993.556202] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283733d-34d4-f6b4-52f1-e553e7193ddd/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 993.556202] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a177a7af-d537-4425-8e34-9f4dfd5baa4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.566731] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283733d-34d4-f6b4-52f1-e553e7193ddd/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 993.566982] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283733d-34d4-f6b4-52f1-e553e7193ddd/disk-0.vmdk. 
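The nova.policy entry records network:attach_external_network failing for a token that carries only the member and reader roles. A toy illustration of why an admin-only rule rejects those credentials; the rule mapping here is an assumed default and this is not oslo.policy.

```python
# Toy illustration of the authorize() outcome in the log. The rule below
# is an assumed admin-only default; this is not the oslo.policy engine.
RULES = {
    "network:attach_external_network": lambda creds: creds.get("is_admin"),
}


def authorize(action, creds):
    rule = RULES.get(action)
    allowed = bool(rule and rule(creds))
    if not allowed:
        print(f"Policy check for {action} failed with credentials {creds}")
    return allowed


creds = {"is_admin": False, "roles": ["member", "reader"]}
authorize("network:attach_external_network", creds)   # -> False
```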
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 993.567293] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e769cb7a-e13d-40f8-b0fc-f3aece3812f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.596350] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242260, 'name': PowerOffVM_Task, 'duration_secs': 0.259278} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.596406] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 993.596674] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 993.597680] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d19025-7a10-4e7e-b745-f2bbe20c52f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.609040] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 993.609662] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a763f082-4945-436a-a2f6-77a892a50916 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.634552] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 993.634777] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 993.634965] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Deleting the datastore file [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.635257] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-68d1270b-d61f-418f-af97-6711c41b92cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.640184] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fe6f90-2819-40c7-8dcb-cf71d82103bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.643802] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 993.643802] env[69994]: value = "task-3242262" [ 993.643802] env[69994]: _type = "Task" [ 993.643802] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.650377] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf965f92-9b40-4cdf-9faf-1277d52b0422 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.656229] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.685590] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ed5f14-bc0a-43e0-97f5-b1ccb4346e4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.694246] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fce0e3c-5c9b-4358-8768-e4f5c496af8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.708100] env[69994]: DEBUG nova.compute.provider_tree [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.755396] env[69994]: DEBUG nova.network.neutron [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Successfully created port: cd8d4128-2422-4fb9-989c-0ceb2eb2123a {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.794181] env[69994]: DEBUG oslo_vmware.rw_handles [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283733d-34d4-f6b4-52f1-e553e7193ddd/disk-0.vmdk. 
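The ServersNegativeTestJSON entries trace the image import end to end: the image bytes are written to the lease's VMDK URL, the lease is checked and marked complete (HttpNfcLeaseComplete), and the write handle is closed. A hedged, standard-library-only sketch of that shape; the callables passed in are hypothetical, not oslo.vmware's rw_handles API.

```python
# Illustrative shape of the upload-then-release flow in the log; the
# callables are hypothetical stand-ins, not the oslo.vmware API.
import urllib.request


def upload_vmdk(url, data):
    # Stream the raw disk bytes to the lease's device URL. Real code also
    # sends periodic HttpNfcLeaseProgress updates so the lease stays alive.
    req = urllib.request.Request(url, data=data, method="PUT")
    req.add_header("Content-Type", "application/octet-stream")
    with urllib.request.urlopen(req) as resp:
        return resp.status


def release_lease(get_lease_state, complete_lease, abort_lease):
    state = get_lease_state()
    if state == "ready":
        complete_lease()          # HttpNfcLeaseComplete in the log
    else:
        abort_lease()             # anything else is treated as a failure
```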
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 993.794364] env[69994]: INFO nova.virt.vmwareapi.images [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Downloaded image file data 18590b17-addb-4605-8ce4-cb732b6f48da [ 993.796440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3956538d-f11d-455d-a9e4-b1d48a28f754 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.799341] env[69994]: DEBUG nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 993.816146] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaaf470f-5d0e-44fd-8a44-034e79a19a61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.838377] env[69994]: INFO nova.virt.vmwareapi.images [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] The imported VM was unregistered [ 993.841843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 993.841843] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Creating directory with path [datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.841843] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbe2dca9-fd44-491c-8a89-7fbd04e940ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.852747] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Created directory with path [datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.852993] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23/OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23.vmdk to [datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da/18590b17-addb-4605-8ce4-cb732b6f48da.vmdk. 
{{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 993.853283] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-73374890-7efe-4004-9ebc-7907e8c2541d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.861935] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 993.861935] env[69994]: value = "task-3242264" [ 993.861935] env[69994]: _type = "Task" [ 993.861935] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.873685] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242264, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.960105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "95b7d534-ac5b-4982-830d-bf65ecd610b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.960361] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.992290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "refresh_cache-290e8749-6860-4303-b966-65d2efee5499" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.992290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "refresh_cache-290e8749-6860-4303-b966-65d2efee5499" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.992290] env[69994]: DEBUG nova.network.neutron [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 994.156447] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112807} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.156447] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 994.156447] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 994.156800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 994.211585] env[69994]: DEBUG nova.scheduler.client.report [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.377080] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242264, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.462633] env[69994]: DEBUG nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 994.536024] env[69994]: DEBUG nova.network.neutron [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Instance cache missing network info. 
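The inventory report above fixes the provider's schedulable capacity. As a quick worked check, placement-style capacity per resource class is (total - reserved) * allocation_ratio; the snippet below just applies that to the figures from the log.

```python
# Worked check of the inventory reported above: schedulable capacity is
# (total - reserved) * allocation_ratio per resource class.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```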
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.718251] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.938s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.722859] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.840s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.723443] env[69994]: DEBUG nova.objects.instance [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'resources' on Instance uuid e638fe4f-5f75-4d38-8a58-15dd66fd9e27 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.746568] env[69994]: INFO nova.scheduler.client.report [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted allocations for instance 4b3addd0-22b0-4793-af75-dba381c4a83f [ 994.802177] env[69994]: DEBUG nova.network.neutron [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Updating instance_info_cache with network_info: [{"id": "cbe6409a-2178-4561-aae1-8f9071a0c976", "address": "fa:16:3e:07:51:96", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbe6409a-21", "ovs_interfaceid": "cbe6409a-2178-4561-aae1-8f9071a0c976", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.819645] env[69994]: DEBUG nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 994.869613] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 994.869942] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.870184] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 994.870432] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.870619] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 994.870814] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 994.871094] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 994.871296] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 994.871498] 
env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 994.871729] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 994.871964] env[69994]: DEBUG nova.virt.hardware [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 994.873332] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a29308-a949-42ed-9c2c-e7f2d0fdaf97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.886729] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddef5b85-0b6a-4151-bec1-c0c81ad2e99a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.891397] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242264, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.984811] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.184598] env[69994]: DEBUG nova.compute.manager [req-cf5d138b-cd32-4619-bd7b-1c45227f2ebf req-61a9c830-c3d5-49a4-8e76-2fb6c6f0bf08 service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Received event network-vif-plugged-cd8d4128-2422-4fb9-989c-0ceb2eb2123a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 995.185431] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf5d138b-cd32-4619-bd7b-1c45227f2ebf req-61a9c830-c3d5-49a4-8e76-2fb6c6f0bf08 service nova] Acquiring lock "8f5a5852-cd78-434f-b413-3cc2314575bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.185431] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf5d138b-cd32-4619-bd7b-1c45227f2ebf req-61a9c830-c3d5-49a4-8e76-2fb6c6f0bf08 service nova] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.185431] env[69994]: DEBUG oslo_concurrency.lockutils [req-cf5d138b-cd32-4619-bd7b-1c45227f2ebf req-61a9c830-c3d5-49a4-8e76-2fb6c6f0bf08 service nova] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.185575] env[69994]: DEBUG nova.compute.manager [req-cf5d138b-cd32-4619-bd7b-1c45227f2ebf req-61a9c830-c3d5-49a4-8e76-2fb6c6f0bf08 service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] No waiting events found dispatching network-vif-plugged-cd8d4128-2422-4fb9-989c-0ceb2eb2123a {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 995.185608] env[69994]: WARNING nova.compute.manager [req-cf5d138b-cd32-4619-bd7b-1c45227f2ebf req-61a9c830-c3d5-49a4-8e76-2fb6c6f0bf08 service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Received unexpected event network-vif-plugged-cd8d4128-2422-4fb9-989c-0ceb2eb2123a for instance with vm_state building and task_state spawning. 
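The lockutils lines throughout this span report how long each caller waited for and then held the "compute_resources" lock (e.g. waited 16.840s, held 1.938s). A small sketch of how such waited/held timings can be produced with a context manager; this is illustrative and not oslo.concurrency's implementation.

```python
# Sketch of wait/hold timing around a named lock, in the spirit of the
# lockutils messages above; not oslo.concurrency's fair-lock code.
import threading
import time
from contextlib import contextmanager

_locks = {}


@contextmanager
def timed_lock(name, caller):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    pass
```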
[ 995.201813] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.202339] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.202339] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.202514] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.202661] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.202869] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.203106] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.203289] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 995.203435] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Got 1 possible 
topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.203595] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.203834] env[69994]: DEBUG nova.virt.hardware [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.204815] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a629645-8829-4b1f-8f75-b06d31a36451 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.216211] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f904bc9e-e0a8-466c-9387-d815e035ad2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.234640] env[69994]: DEBUG nova.objects.instance [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'numa_topology' on Instance uuid e638fe4f-5f75-4d38-8a58-15dd66fd9e27 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.238517] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.242300] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.243506] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 995.243742] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7312e40-065b-4b50-8206-b8e4486737ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.261102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8e6d9c45-9888-4ac5-92e0-8d5d60891b33 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4b3addd0-22b0-4793-af75-dba381c4a83f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.220s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.268885] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.268885] env[69994]: value = "task-3242265" [ 995.268885] env[69994]: _type = "Task" [ 995.268885] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.281553] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242265, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.286911] env[69994]: DEBUG nova.network.neutron [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Successfully updated port: cd8d4128-2422-4fb9-989c-0ceb2eb2123a {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.305321] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "refresh_cache-290e8749-6860-4303-b966-65d2efee5499" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.305650] env[69994]: DEBUG nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Instance network_info: |[{"id": "cbe6409a-2178-4561-aae1-8f9071a0c976", "address": "fa:16:3e:07:51:96", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapcbe6409a-21", "ovs_interfaceid": "cbe6409a-2178-4561-aae1-8f9071a0c976", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 995.306661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:51:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbe6409a-2178-4561-aae1-8f9071a0c976', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.320012] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating folder: Project (a4c158f7555d4606b641be4264d95eaa). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.320897] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c37323e-2e8a-4bda-b52d-66f2c55c4a68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.336100] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created folder: Project (a4c158f7555d4606b641be4264d95eaa) in parent group-v647729. [ 995.336465] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating folder: Instances. Parent ref: group-v647967. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.336805] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75597c27-0f1e-43c3-b17b-2c1c404562bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.343221] env[69994]: DEBUG nova.compute.manager [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Received event network-changed-cbe6409a-2178-4561-aae1-8f9071a0c976 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 995.343505] env[69994]: DEBUG nova.compute.manager [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Refreshing instance network info cache due to event network-changed-cbe6409a-2178-4561-aae1-8f9071a0c976. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 995.343882] env[69994]: DEBUG oslo_concurrency.lockutils [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] Acquiring lock "refresh_cache-290e8749-6860-4303-b966-65d2efee5499" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.344104] env[69994]: DEBUG oslo_concurrency.lockutils [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] Acquired lock "refresh_cache-290e8749-6860-4303-b966-65d2efee5499" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.344343] env[69994]: DEBUG nova.network.neutron [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Refreshing network info cache for port cbe6409a-2178-4561-aae1-8f9071a0c976 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.350532] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created folder: Instances in parent group-v647967. [ 995.350822] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.351044] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 290e8749-6860-4303-b966-65d2efee5499] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 995.351200] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-882c32d4-c090-4f37-ba07-6c71453d4851 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.376589] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242264, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.377980] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.377980] env[69994]: value = "task-3242268" [ 995.377980] env[69994]: _type = "Task" [ 995.377980] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.388907] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242268, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.743946] env[69994]: DEBUG nova.objects.base [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 995.780120] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242265, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.789844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-8f5a5852-cd78-434f-b413-3cc2314575bb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.789844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-8f5a5852-cd78-434f-b413-3cc2314575bb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.790035] env[69994]: DEBUG nova.network.neutron [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.882063] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242264, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.892336] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242268, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.030342] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33da13d5-0834-43f0-a7a8-674b648099ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.041264] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ab47bb-16c3-4b4c-b219-4f6f4d08b9f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.083316] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c304a164-90f2-4e85-98a8-c568d9baf884 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.094871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97688812-f815-4bc9-8253-394f13c2818f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.112187] env[69994]: DEBUG nova.compute.provider_tree [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.125765] env[69994]: DEBUG nova.network.neutron [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Updated VIF entry in instance network info cache for port cbe6409a-2178-4561-aae1-8f9071a0c976. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 996.126144] env[69994]: DEBUG nova.network.neutron [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Updating instance_info_cache with network_info: [{"id": "cbe6409a-2178-4561-aae1-8f9071a0c976", "address": "fa:16:3e:07:51:96", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbe6409a-21", "ovs_interfaceid": "cbe6409a-2178-4561-aae1-8f9071a0c976", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.278961] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242265, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.323213] env[69994]: DEBUG nova.network.neutron [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 996.376720] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242264, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.441248} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.377099] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23/OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23.vmdk to [datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da/18590b17-addb-4605-8ce4-cb732b6f48da.vmdk. 
[ 996.377381] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Cleaning up location [datastore2] OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 996.377581] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7e862623-f8af-4ec9-a3e8-25e3618abf23 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.377843] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f15eab0-273d-40c9-97b5-094ff3ab532b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.383989] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 996.383989] env[69994]: value = "task-3242269" [ 996.383989] env[69994]: _type = "Task" [ 996.383989] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.397101] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242268, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.400034] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242269, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.458248] env[69994]: DEBUG nova.network.neutron [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Updating instance_info_cache with network_info: [{"id": "cd8d4128-2422-4fb9-989c-0ceb2eb2123a", "address": "fa:16:3e:57:80:f6", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8d4128-24", "ovs_interfaceid": "cd8d4128-2422-4fb9-989c-0ceb2eb2123a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.615638] env[69994]: DEBUG nova.scheduler.client.report [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 996.629082] env[69994]: DEBUG oslo_concurrency.lockutils [req-226180af-c022-48ed-89d5-fd39715b8a56 req-160f88ae-31b3-4281-87c8-b38079e7892b service nova] Releasing lock "refresh_cache-290e8749-6860-4303-b966-65d2efee5499" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.779457] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242265, 'name': CreateVM_Task, 'duration_secs': 1.374275} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.779608] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.780022] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.780193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.780504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 996.780743] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-996ba7fc-2a24-49a6-a756-20dd4206ad1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.785281] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 996.785281] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5245b58d-7ecb-917c-98c1-0fed6b090396" [ 996.785281] env[69994]: _type = "Task" [ 996.785281] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.792362] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5245b58d-7ecb-917c-98c1-0fed6b090396, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.896739] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242268, 'name': CreateVM_Task, 'duration_secs': 1.098348} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.899290] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 290e8749-6860-4303-b966-65d2efee5499] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.899570] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061551} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.900245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.900416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.900726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 996.901042] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.901206] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da/18590b17-addb-4605-8ce4-cb732b6f48da.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.901425] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da/18590b17-addb-4605-8ce4-cb732b6f48da.vmdk to [datastore2] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 996.901678] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dc8a7af-efd8-498a-8950-bc5e29060a7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.903237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2620d9a-024d-40e3-8422-980326e36524 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.907970] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 996.907970] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526c0b27-de7f-d0fc-2ec8-2a8d74f1584a" [ 996.907970] env[69994]: _type = "Task" [ 996.907970] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.912035] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 996.912035] env[69994]: value = "task-3242270" [ 996.912035] env[69994]: _type = "Task" [ 996.912035] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.919457] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c0b27-de7f-d0fc-2ec8-2a8d74f1584a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.923771] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.960581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-8f5a5852-cd78-434f-b413-3cc2314575bb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.960923] env[69994]: DEBUG nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Instance network_info: |[{"id": "cd8d4128-2422-4fb9-989c-0ceb2eb2123a", "address": "fa:16:3e:57:80:f6", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8d4128-24", "ovs_interfaceid": "cd8d4128-2422-4fb9-989c-0ceb2eb2123a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 996.961401] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:80:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd8d4128-2422-4fb9-989c-0ceb2eb2123a', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 996.968887] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.969119] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 996.969338] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c36dc39f-ecd6-419e-96b1-35801df637b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.989037] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 996.989037] env[69994]: value = "task-3242271" [ 996.989037] env[69994]: _type = "Task" [ 996.989037] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.999013] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242271, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.120778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.398s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.123391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.122s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.123653] env[69994]: DEBUG nova.objects.instance [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lazy-loading 'resources' on Instance uuid ee7e0c02-ef19-4475-a936-f591c8185797 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.210658] env[69994]: DEBUG nova.compute.manager [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Received event network-changed-cd8d4128-2422-4fb9-989c-0ceb2eb2123a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.210658] env[69994]: DEBUG nova.compute.manager [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] [instance: 
8f5a5852-cd78-434f-b413-3cc2314575bb] Refreshing instance network info cache due to event network-changed-cd8d4128-2422-4fb9-989c-0ceb2eb2123a. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 997.210658] env[69994]: DEBUG oslo_concurrency.lockutils [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] Acquiring lock "refresh_cache-8f5a5852-cd78-434f-b413-3cc2314575bb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.210793] env[69994]: DEBUG oslo_concurrency.lockutils [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] Acquired lock "refresh_cache-8f5a5852-cd78-434f-b413-3cc2314575bb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.210939] env[69994]: DEBUG nova.network.neutron [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Refreshing network info cache for port cd8d4128-2422-4fb9-989c-0ceb2eb2123a {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 997.298020] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5245b58d-7ecb-917c-98c1-0fed6b090396, 'name': SearchDatastore_Task, 'duration_secs': 0.069106} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.298410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.298571] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.298804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.298954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.299152] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 
tempest-ServerShowV254Test-1196559966-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.299413] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c0a9e67-70e4-462d-8a2a-825e7a9f9f34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.316400] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.316581] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.317319] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71683733-56e7-428a-92bf-33557bcee62f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.322437] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 997.322437] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527842a1-dbc9-5e20-2ac5-77a5e7fe6e61" [ 997.322437] env[69994]: _type = "Task" [ 997.322437] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.329787] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527842a1-dbc9-5e20-2ac5-77a5e7fe6e61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.421662] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c0b27-de7f-d0fc-2ec8-2a8d74f1584a, 'name': SearchDatastore_Task, 'duration_secs': 0.047951} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.424760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.425016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.425311] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.425463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.425641] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.425901] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.426122] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ef9c128-e768-4d72-9a04-26d0052f6563 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.433799] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.433971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.434667] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd2b17a-5eee-4219-9670-828cd51ed042 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.439629] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 997.439629] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521439fe-3398-1e4f-c46f-ebd3ff73e9c7" [ 997.439629] env[69994]: _type = "Task" [ 997.439629] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.446532] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521439fe-3398-1e4f-c46f-ebd3ff73e9c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.498197] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242271, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.633250] env[69994]: DEBUG oslo_concurrency.lockutils [None req-227255ed-f864-4194-80b8-b7072b2b7137 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 42.938s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.634138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 19.141s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.634410] env[69994]: DEBUG oslo_concurrency.lockutils [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.634639] env[69994]: DEBUG oslo_concurrency.lockutils [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.634831] env[69994]: DEBUG oslo_concurrency.lockutils [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 
tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.638079] env[69994]: INFO nova.compute.manager [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Terminating instance [ 997.835290] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527842a1-dbc9-5e20-2ac5-77a5e7fe6e61, 'name': SearchDatastore_Task, 'duration_secs': 0.036943} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.839579] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42451d2e-6253-4860-981a-71ceeebe2425 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.848828] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 997.848828] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525cd1dd-6e10-8960-c461-ff9218854c7e" [ 997.848828] env[69994]: _type = "Task" [ 997.848828] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.862391] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525cd1dd-6e10-8960-c461-ff9218854c7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.875864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ef410b09-8686-409e-8391-d50cd0e0df04" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.876112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.882885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6fb4d9-6518-4913-9986-7c6115071b0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.893185] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8fe922-4cfa-4009-8a77-cd52f6c97835 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.930246] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dcbf7a-08f6-4075-9522-9905e224bcf0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.944501] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6378dc87-7987-4b49-8218-6bda5b4be893 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.949440] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242270, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.959055] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521439fe-3398-1e4f-c46f-ebd3ff73e9c7, 'name': SearchDatastore_Task, 'duration_secs': 0.029186} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.967962] env[69994]: DEBUG nova.compute.provider_tree [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.973030] env[69994]: DEBUG nova.network.neutron [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Updated VIF entry in instance network info cache for port cd8d4128-2422-4fb9-989c-0ceb2eb2123a. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.973030] env[69994]: DEBUG nova.network.neutron [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Updating instance_info_cache with network_info: [{"id": "cd8d4128-2422-4fb9-989c-0ceb2eb2123a", "address": "fa:16:3e:57:80:f6", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd8d4128-24", "ovs_interfaceid": "cd8d4128-2422-4fb9-989c-0ceb2eb2123a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.973030] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75496a75-c59f-4ad9-ae57-ffae0c68239b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.981435] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 997.981435] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c6591a-bc67-f802-3c6a-5286aac188b8" [ 997.981435] env[69994]: _type = "Task" [ 997.981435] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.993239] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c6591a-bc67-f802-3c6a-5286aac188b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.002916] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242271, 'name': CreateVM_Task, 'duration_secs': 0.748161} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.003171] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 998.003864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.004134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.004383] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 998.004648] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-200bbe36-95ed-4841-8006-530f06233247 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.010964] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 998.010964] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f01edd-439c-38ae-c664-08f76889f742" [ 998.010964] env[69994]: _type = "Task" [ 998.010964] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.024685] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f01edd-439c-38ae-c664-08f76889f742, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.142429] env[69994]: DEBUG nova.compute.manager [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 998.142600] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.142802] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f07d5229-a129-4222-9f57-ba1986d1e839 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.156689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4034e65-7749-4a7c-b4cc-ca939d668dd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.193333] env[69994]: WARNING nova.virt.vmwareapi.vmops [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e638fe4f-5f75-4d38-8a58-15dd66fd9e27 could not be found. [ 998.193680] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.193778] env[69994]: INFO nova.compute.manager [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Took 0.05 seconds to destroy the instance on the hypervisor. [ 998.193992] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 998.195032] env[69994]: DEBUG nova.compute.manager [-] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 998.195032] env[69994]: DEBUG nova.network.neutron [-] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 998.363207] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525cd1dd-6e10-8960-c461-ff9218854c7e, 'name': SearchDatastore_Task, 'duration_secs': 0.019591} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.363586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.363849] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.364249] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90b4fe0a-0568-4a32-92ad-90909a3e0eb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.374029] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 998.374029] env[69994]: value = "task-3242272" [ 998.374029] env[69994]: _type = "Task" [ 998.374029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.379074] env[69994]: DEBUG nova.compute.utils [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 998.384728] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.439746] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242270, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.476626] env[69994]: DEBUG oslo_concurrency.lockutils [req-408fb89f-2948-457a-8287-5892a8530867 req-ce3cb1ce-d4b3-4526-b36e-97fd4ab2749d service nova] Releasing lock "refresh_cache-8f5a5852-cd78-434f-b413-3cc2314575bb" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.477889] env[69994]: DEBUG nova.scheduler.client.report [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.493633] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c6591a-bc67-f802-3c6a-5286aac188b8, 'name': SearchDatastore_Task, 'duration_secs': 0.088239} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.493977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.494438] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 290e8749-6860-4303-b966-65d2efee5499/290e8749-6860-4303-b966-65d2efee5499.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.494592] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5a0534f-cf69-4fdc-9a2b-395950e64cb9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.504141] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 998.504141] env[69994]: value = "task-3242273" [ 998.504141] env[69994]: _type = "Task" [ 998.504141] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.518696] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242273, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.527350] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f01edd-439c-38ae-c664-08f76889f742, 'name': SearchDatastore_Task, 'duration_secs': 0.013558} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.527720] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.528013] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.528624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.528624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.528624] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.528813] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc6ed648-0752-42ba-801f-142d83675e5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.539876] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.540150] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 998.540915] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05339e6d-ecdd-4dc8-83a8-b6b933e891f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.548017] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 998.548017] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524dcf88-85ef-27e1-7d52-1584b5ae023b" [ 998.548017] env[69994]: _type = "Task" [ 998.548017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.556798] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524dcf88-85ef-27e1-7d52-1584b5ae023b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.881754] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.888965] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242272, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.939442] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242270, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.951041] env[69994]: DEBUG nova.network.neutron [-] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.983118] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.860s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.986516] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.816s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.988030] env[69994]: INFO nova.compute.claims [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.010665] env[69994]: INFO nova.scheduler.client.report [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Deleted allocations for instance ee7e0c02-ef19-4475-a936-f591c8185797 [ 999.020912] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.059791] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524dcf88-85ef-27e1-7d52-1584b5ae023b, 'name': SearchDatastore_Task, 'duration_secs': 0.014371} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.061426] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6af80a1f-70a3-42c9-96e3-29ca438a1b3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.069069] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 999.069069] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521f7c8d-e474-ca6c-c0a7-dcd782193413" [ 999.069069] env[69994]: _type = "Task" [ 999.069069] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.079648] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f7c8d-e474-ca6c-c0a7-dcd782193413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.386704] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577622} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.387089] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.387384] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 999.387602] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-afe7ce45-5259-48a9-9155-13d0712639d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.397680] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 999.397680] env[69994]: value = "task-3242274" [ 999.397680] env[69994]: _type = "Task" [ 999.397680] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.408640] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.439273] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242270, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.460424] env[69994]: INFO nova.compute.manager [-] [instance: e638fe4f-5f75-4d38-8a58-15dd66fd9e27] Took 1.26 seconds to deallocate network for instance. 
[ 999.520216] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.524689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a5171f02-4c58-453e-96dd-a41642fcabb8 tempest-ServerShowV257Test-546585564 tempest-ServerShowV257Test-546585564-project-member] Lock "ee7e0c02-ef19-4475-a936-f591c8185797" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.824s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.582409] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521f7c8d-e474-ca6c-c0a7-dcd782193413, 'name': SearchDatastore_Task, 'duration_secs': 0.014261} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.582732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.583007] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 999.583317] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd15741a-0311-4b38-86a4-c6e87b58e673 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.591631] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 999.591631] env[69994]: value = "task-3242275" [ 999.591631] env[69994]: _type = "Task" [ 999.591631] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.603396] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242275, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.910404] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080572} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.910691] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 999.911578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66fdb82-932f-4cd5-8291-61de74c32efa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.932181] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.932609] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e104db8-4cb6-4bfe-a6b8-30891717fcb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.956571] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242270, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.827253} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.958219] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/18590b17-addb-4605-8ce4-cb732b6f48da/18590b17-addb-4605-8ce4-cb732b6f48da.vmdk to [datastore2] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 999.958660] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 999.958660] env[69994]: value = "task-3242276" [ 999.958660] env[69994]: _type = "Task" [ 999.958660] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.959359] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434c8630-9135-43e3-bd6f-8a3eee872d58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.962469] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ef410b09-8686-409e-8391-d50cd0e0df04" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.962717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.962947] env[69994]: INFO nova.compute.manager [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Attaching volume aca9e875-f8b1-40fd-8a48-9f448b62ea3b to /dev/sdb [ 999.997994] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.998367] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.003397] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52394131-f078-4534-b33c-a9427c1e713b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.032235] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242273, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.033151] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 1000.033151] env[69994]: value = "task-3242277" [ 1000.033151] env[69994]: _type = "Task" [ 1000.033151] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.042057] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242277, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.045624] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0042413e-d246-4e94-bd09-3dc2aec0a82d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.056617] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddc93bb-dc2f-4a46-aa66-77dda599de76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.073472] env[69994]: DEBUG nova.virt.block_device [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating existing volume attachment record: f2695474-a7cf-43ff-a474-03f12fa2bf0e {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1000.103511] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242275, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.293046] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d82b90-fb97-4cb7-9220-1549315bb90d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.302704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7094fa4-7b40-493c-9108-95517cb03f7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.032654] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55250a90-341e-4388-8440-960a123ba57c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.035704] env[69994]: DEBUG oslo_concurrency.lockutils [None req-72a6fba1-0326-4a57-a690-1d5dbe223d47 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "e638fe4f-5f75-4d38-8a58-15dd66fd9e27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.402s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.043473] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242277, 'name': ReconfigVM_Task, 'duration_secs': 0.746689} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.052851] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e/6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.053551] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242276, 'name': ReconfigVM_Task, 'duration_secs': 0.442671} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.053767] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242273, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.686731} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.053964] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.66551} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.055122] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f2421e2-d617-4368-887b-552931ff62e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.056749] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3/095e75b1-7806-4d1d-ab9e-49735f7aa0f3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.057303] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 290e8749-6860-4303-b966-65d2efee5499/290e8749-6860-4303-b966-65d2efee5499.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.057499] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.057729] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.057907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.060090] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebfc9c5a-2d11-4097-93e2-9bf3aa0dae37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.061479] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95dbde2a-4535-418a-bb4a-654745e10578 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.064161] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e5a6e99-1861-4f3b-840f-afa7d8e4a060 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.066467] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ee30a5-ef1b-4f07-9552-67b6ede530c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.072115] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 1001.072115] env[69994]: value = "task-3242281" [ 1001.072115] env[69994]: _type = "Task" [ 1001.072115] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.085678] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 1001.085678] env[69994]: value = "task-3242282" [ 1001.085678] env[69994]: _type = "Task" [ 1001.085678] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.085945] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1001.085945] env[69994]: value = "task-3242284" [ 1001.085945] env[69994]: _type = "Task" [ 1001.085945] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.086289] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1001.086289] env[69994]: value = "task-3242283" [ 1001.086289] env[69994]: _type = "Task" [ 1001.086289] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.087067] env[69994]: DEBUG nova.compute.provider_tree [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1001.101959] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242281, 'name': Rename_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.108238] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242282, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.113352] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.113818] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242283, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.582390] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242281, 'name': Rename_Task, 'duration_secs': 0.149672} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.582843] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.584056] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0176c780-3629-4d75-8915-6095d51ec9f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.589369] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 1001.589369] env[69994]: value = "task-3242285" [ 1001.589369] env[69994]: _type = "Task" [ 1001.589369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.607826] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242282, 'name': Rename_Task, 'duration_secs': 0.22794} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.616339] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.616610] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174144} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.616861] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242283, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068118} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.617125] env[69994]: DEBUG oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242285, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.617345] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd62a882-f241-4f32-9d83-45e92b331cec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.618929] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.619303] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.620099] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f454c0f1-048b-4b57-ba3e-ba6804fceb83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.623075] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef704d9-8dc8-4f8a-a5ae-064eadeac0d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.645568] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.646694] env[69994]: ERROR nova.scheduler.client.report [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [req-426d264d-1b8d-4a30-8169-b227d28b0bc7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-426d264d-1b8d-4a30-8169-b227d28b0bc7"}]} [ 1001.656369] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d6d578f-d931-4376-b36d-c46ddb112f1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.672388] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 1001.672388] env[69994]: value = "task-3242286" [ 1001.672388] env[69994]: _type = "Task" [ 1001.672388] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.680863] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 290e8749-6860-4303-b966-65d2efee5499/290e8749-6860-4303-b966-65d2efee5499.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.681417] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9492c50-d169-43a8-ba21-6949573bcccb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.696210] env[69994]: DEBUG nova.scheduler.client.report [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1001.703560] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1001.703560] env[69994]: value = "task-3242287" [ 1001.703560] env[69994]: _type = "Task" [ 1001.703560] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.706657] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242286, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.710281] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1001.710281] env[69994]: value = "task-3242288" [ 1001.710281] env[69994]: _type = "Task" [ 1001.710281] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.714012] env[69994]: DEBUG nova.scheduler.client.report [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1001.714227] env[69994]: DEBUG nova.compute.provider_tree [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1001.719154] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242287, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.723131] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242288, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.728380] env[69994]: DEBUG nova.scheduler.client.report [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1001.745229] env[69994]: DEBUG nova.scheduler.client.report [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1001.984406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f33dc4-273e-4de9-bcb3-39f431da9bc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.992016] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d55e06-d5c5-445b-ac9f-770029ee1051 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.027562] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "47e80abc-2f7e-432c-bd2f-3064841401fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.027821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.030044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd23467e-dea4-4431-8864-57820b533e90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.037733] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402d95d7-0cff-4037-83e2-8687282b8abb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.055083] env[69994]: DEBUG nova.compute.provider_tree [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.107304] env[69994]: DEBUG 
oslo_vmware.api [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242285, 'name': PowerOnVM_Task, 'duration_secs': 0.485422} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.107515] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.190916] env[69994]: DEBUG oslo_vmware.api [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242286, 'name': PowerOnVM_Task, 'duration_secs': 0.465504} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.191247] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.191494] env[69994]: DEBUG nova.compute.manager [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.192261] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6053fc5-387e-4466-9809-21d314f13462 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.206516] env[69994]: DEBUG nova.compute.manager [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.207343] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbaf0a7c-634a-4486-9a02-2433b3dfc203 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.222416] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242287, 'name': ReconfigVM_Task, 'duration_secs': 0.312443} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.222897] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.225962] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2871f691-89a2-4815-8af7-9ce19fc5b828 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.227427] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242288, 'name': ReconfigVM_Task, 'duration_secs': 0.290304} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.227671] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 290e8749-6860-4303-b966-65d2efee5499/290e8749-6860-4303-b966-65d2efee5499.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.228436] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-238ab6fe-2dd6-4dae-8deb-37fce698f5d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.232348] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1002.232348] env[69994]: value = "task-3242289" [ 1002.232348] env[69994]: _type = "Task" [ 1002.232348] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.237288] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1002.237288] env[69994]: value = "task-3242290" [ 1002.237288] env[69994]: _type = "Task" [ 1002.237288] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.243008] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242289, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.249097] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242290, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.533370] env[69994]: DEBUG nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1002.558203] env[69994]: DEBUG nova.scheduler.client.report [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.710235] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.731155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-862c346d-a59f-43c8-bc7e-96e5f335b9dd tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 32.782s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.746043] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242289, 'name': Rename_Task, 'duration_secs': 0.166665} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.749154] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.749430] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242290, 'name': Rename_Task, 'duration_secs': 0.149062} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.750339] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4da2917d-c92f-4aee-adb5-3bb0ed28edef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.752014] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.752524] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e5b1e8a-1471-4d36-85f1-3ed7e951af0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.758636] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1002.758636] env[69994]: value = "task-3242292" [ 1002.758636] env[69994]: _type = "Task" [ 1002.758636] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.760097] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1002.760097] env[69994]: value = "task-3242293" [ 1002.760097] env[69994]: _type = "Task" [ 1002.760097] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.770376] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.773163] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242292, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.056480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.063496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.078s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.063990] env[69994]: DEBUG nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1003.066609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.383s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.275462] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242293, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.278686] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242292, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.315240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.315519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.315730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.315972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.316743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.319256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700977df-f9cb-498e-afa7-526b5cf84e85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.322502] env[69994]: INFO nova.compute.manager [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Terminating instance [ 1003.328639] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95288ddd-812c-416d-be72-f5d62a349e32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.361590] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5b42ea-5e54-4211-b12c-0be1a48bdd30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.370195] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbbba84-8dd8-4074-b5be-913e7a92754a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.384322] env[69994]: DEBUG nova.compute.provider_tree [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.572219] env[69994]: DEBUG nova.compute.utils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1003.574621] env[69994]: DEBUG nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1003.574794] env[69994]: DEBUG nova.network.neutron [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1003.630185] env[69994]: DEBUG nova.policy [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7376c23eabf4805bb118eebe2ce5bca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '689575b37ef346ec99e64166e90e22d6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1003.774354] env[69994]: DEBUG oslo_vmware.api [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242292, 'name': PowerOnVM_Task, 'duration_secs': 0.566593} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.775092] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.775393] env[69994]: INFO nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Took 8.97 seconds to spawn the instance on the hypervisor. 
[ 1003.775641] env[69994]: DEBUG nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.776497] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49829166-94f4-4541-8116-9e195c217122 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.782009] env[69994]: DEBUG oslo_vmware.api [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242293, 'name': PowerOnVM_Task, 'duration_secs': 0.592823} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.782634] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.782932] env[69994]: INFO nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Took 11.35 seconds to spawn the instance on the hypervisor. [ 1003.783174] env[69994]: DEBUG nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.783957] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493982bd-e628-470c-a01f-2cfccc4a929d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.827095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "refresh_cache-095e75b1-7806-4d1d-ab9e-49735f7aa0f3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.827300] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquired lock "refresh_cache-095e75b1-7806-4d1d-ab9e-49735f7aa0f3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.827490] env[69994]: DEBUG nova.network.neutron [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.888512] env[69994]: DEBUG nova.scheduler.client.report [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 
tempest-ServerActionsV293TestJSON-1892589652-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1003.892199] env[69994]: DEBUG nova.network.neutron [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Successfully created port: e9053dbb-d79a-4cac-a034-fe85d0ed9832 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.914302] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53893957-422e-4e49-99de-b222c30db6f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.922126] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bace6308-f077-4fd3-8342-cf694b584744 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1003.923138] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-87ba6e4e-ff39-462f-b4d1-c0ecdc379e0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.931923] env[69994]: DEBUG oslo_vmware.api [None req-bace6308-f077-4fd3-8342-cf694b584744 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 1003.931923] env[69994]: value = "task-3242294" [ 1003.931923] env[69994]: _type = "Task" [ 1003.931923] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.940907] env[69994]: DEBUG oslo_vmware.api [None req-bace6308-f077-4fd3-8342-cf694b584744 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242294, 'name': SuspendVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.075685] env[69994]: DEBUG nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1004.301658] env[69994]: INFO nova.compute.manager [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Took 28.69 seconds to build instance. 
[ 1004.309153] env[69994]: INFO nova.compute.manager [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Took 30.78 seconds to build instance. [ 1004.395934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.329s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.396229] env[69994]: INFO nova.compute.manager [None req-8c1a6831-b721-41b1-ad6b-0af339ea2627 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Successfully reverted task state from rebuilding on failure for instance. [ 1004.402333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.164s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.402568] env[69994]: DEBUG nova.objects.instance [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lazy-loading 'resources' on Instance uuid 87473dd1-458d-4ef4-a1bd-7e653e509ea4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.442386] env[69994]: DEBUG oslo_vmware.api [None req-bace6308-f077-4fd3-8342-cf694b584744 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242294, 'name': SuspendVM_Task} progress is 70%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.612543] env[69994]: DEBUG nova.network.neutron [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1004.760472] env[69994]: DEBUG nova.network.neutron [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.807825] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6fb4ac3-8a15-4dd6-b5e9-015531fabc57 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.205s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.811374] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7422ba9c-b574-47dd-8325-b1abb4482112 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.296s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.942212] env[69994]: DEBUG oslo_vmware.api [None req-bace6308-f077-4fd3-8342-cf694b584744 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242294, 'name': SuspendVM_Task, 'duration_secs': 0.611143} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.944747] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-bace6308-f077-4fd3-8342-cf694b584744 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1004.944936] env[69994]: DEBUG nova.compute.manager [None req-bace6308-f077-4fd3-8342-cf694b584744 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1004.945898] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e61045-1020-4569-87f7-44c932981793 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.086104] env[69994]: DEBUG nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1005.106409] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1005.106695] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1005.106855] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1005.107058] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1005.107217] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1005.107365] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1005.107571] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1005.107728] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1005.107892] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1005.108069] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1005.108243] env[69994]: DEBUG nova.virt.hardware [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1005.109208] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843e2e0d-6a85-4afc-9676-4547498c0440 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.119371] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b00d7f0-f103-4655-a7fd-9b5866893b7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.137806] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1005.138048] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647972', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'name': 'volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef410b09-8686-409e-8391-d50cd0e0df04', 'attached_at': '', 'detached_at': '', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'serial': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1005.143738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3579b36-47b0-4d02-8b38-d517c6f18c62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.160432] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb06a56-cad1-488d-bcf8-a4b15931e7f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.166273] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd712d2-440d-4a31-8df0-e124c567e4c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.187718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7f984e-dcb5-4665-9076-c44e640901f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.198193] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b/volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.199017] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-479176be-6df9-440f-a0f7-3e42c49ef8cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.241536] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a547646-7e38-4311-b8e0-c0f0b8aad746 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.244244] env[69994]: DEBUG oslo_vmware.api [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1005.244244] env[69994]: value = "task-3242295" [ 1005.244244] env[69994]: _type = "Task" [ 1005.244244] env[69994]: } to 
complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.251557] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62722fb-3575-44ec-9dd4-88da7affc24a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.258800] env[69994]: DEBUG oslo_vmware.api [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242295, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.270080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Releasing lock "refresh_cache-095e75b1-7806-4d1d-ab9e-49735f7aa0f3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.270509] env[69994]: DEBUG nova.compute.manager [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1005.270702] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.271268] env[69994]: DEBUG nova.compute.provider_tree [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.272999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19d8c36-1e07-4eb8-9b2f-eff75cae47d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.279712] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.280010] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e82db35c-e07d-4517-a752-da2ffc79593a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.285032] env[69994]: DEBUG oslo_vmware.api [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 1005.285032] env[69994]: value = "task-3242296" [ 1005.285032] env[69994]: _type = "Task" [ 1005.285032] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.287013] env[69994]: DEBUG nova.compute.manager [req-1ea6e704-16c2-49e1-a708-7b98fb98c462 req-28ad17a4-7b22-49de-b274-858349db30d7 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Received event network-vif-plugged-e9053dbb-d79a-4cac-a034-fe85d0ed9832 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.287216] env[69994]: DEBUG oslo_concurrency.lockutils [req-1ea6e704-16c2-49e1-a708-7b98fb98c462 req-28ad17a4-7b22-49de-b274-858349db30d7 service nova] Acquiring lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.287432] env[69994]: DEBUG oslo_concurrency.lockutils [req-1ea6e704-16c2-49e1-a708-7b98fb98c462 req-28ad17a4-7b22-49de-b274-858349db30d7 service nova] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.287595] env[69994]: DEBUG oslo_concurrency.lockutils [req-1ea6e704-16c2-49e1-a708-7b98fb98c462 req-28ad17a4-7b22-49de-b274-858349db30d7 service nova] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.287773] env[69994]: DEBUG nova.compute.manager [req-1ea6e704-16c2-49e1-a708-7b98fb98c462 req-28ad17a4-7b22-49de-b274-858349db30d7 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] No waiting events found dispatching network-vif-plugged-e9053dbb-d79a-4cac-a034-fe85d0ed9832 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1005.287935] env[69994]: WARNING nova.compute.manager [req-1ea6e704-16c2-49e1-a708-7b98fb98c462 req-28ad17a4-7b22-49de-b274-858349db30d7 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Received unexpected event network-vif-plugged-e9053dbb-d79a-4cac-a034-fe85d0ed9832 for instance with vm_state building and task_state spawning. [ 1005.298209] env[69994]: DEBUG oslo_vmware.api [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242296, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.366793] env[69994]: DEBUG nova.network.neutron [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Successfully updated port: e9053dbb-d79a-4cac-a034-fe85d0ed9832 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.501576] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "290e8749-6860-4303-b966-65d2efee5499" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.502152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.502152] env[69994]: DEBUG nova.compute.manager [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.502921] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c19fa6-9a4f-48ec-98bf-44c17381186c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.510159] env[69994]: DEBUG nova.compute.manager [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1005.510755] env[69994]: DEBUG nova.objects.instance [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lazy-loading 'flavor' on Instance uuid 290e8749-6860-4303-b966-65d2efee5499 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.669577] env[69994]: INFO nova.compute.manager [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Rebuilding instance [ 1005.707063] env[69994]: DEBUG nova.compute.manager [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.708053] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa0dc06-c476-495e-b757-b9e1e8237382 {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.755186] env[69994]: DEBUG oslo_vmware.api [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242295, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.777184] env[69994]: DEBUG nova.scheduler.client.report [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.795355] env[69994]: DEBUG oslo_vmware.api [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242296, 'name': PowerOffVM_Task, 'duration_secs': 0.143774} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.796166] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.796345] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.796590] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05fe44c1-c7f0-450d-a6c6-61e544f9eac6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.825070] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.825327] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.825709] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Deleting the datastore file 
[datastore1] 095e75b1-7806-4d1d-ab9e-49735f7aa0f3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.825912] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d94b88a2-ef51-49a7-9199-95d6910dd4f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.833325] env[69994]: DEBUG oslo_vmware.api [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for the task: (returnval){ [ 1005.833325] env[69994]: value = "task-3242298" [ 1005.833325] env[69994]: _type = "Task" [ 1005.833325] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.840923] env[69994]: DEBUG oslo_vmware.api [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.870010] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "refresh_cache-eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.870204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquired lock "refresh_cache-eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.870357] env[69994]: DEBUG nova.network.neutron [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.254378] env[69994]: DEBUG oslo_vmware.api [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242295, 'name': ReconfigVM_Task, 'duration_secs': 0.853591} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.254787] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfigured VM instance instance-00000040 to attach disk [datastore1] volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b/volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.259306] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5fc4b00-bb82-4126-8b56-fa16ffdcb962 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.274784] env[69994]: DEBUG oslo_vmware.api [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1006.274784] env[69994]: value = "task-3242299" [ 1006.274784] env[69994]: _type = "Task" [ 1006.274784] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.281750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.879s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.287278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.312s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.287520] env[69994]: DEBUG nova.objects.instance [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lazy-loading 'resources' on Instance uuid c14851d2-66c5-4865-ae66-abbe303f0c31 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.288761] env[69994]: DEBUG oslo_vmware.api [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242299, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.303145] env[69994]: INFO nova.scheduler.client.report [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Deleted allocations for instance 87473dd1-458d-4ef4-a1bd-7e653e509ea4 [ 1006.343073] env[69994]: DEBUG oslo_vmware.api [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Task: {'id': task-3242298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126698} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.343366] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.343572] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.343769] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.343959] env[69994]: INFO nova.compute.manager [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1006.344238] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1006.345042] env[69994]: DEBUG nova.compute.manager [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1006.345042] env[69994]: DEBUG nova.network.neutron [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1006.360481] env[69994]: DEBUG nova.network.neutron [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.404620] env[69994]: DEBUG nova.network.neutron [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.503796] env[69994]: INFO nova.compute.manager [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Resuming [ 1006.504448] env[69994]: DEBUG nova.objects.instance [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lazy-loading 'flavor' on Instance uuid 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.517029] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.517029] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cec99aeb-8dc2-4986-8623-4f8a16b57dd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.524841] env[69994]: DEBUG oslo_vmware.api [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1006.524841] env[69994]: value = "task-3242300" [ 1006.524841] env[69994]: _type = "Task" [ 1006.524841] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.532916] env[69994]: DEBUG oslo_vmware.api [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242300, 'name': PowerOffVM_Task} progress is 0%. 
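The wait_for_task / _poll_task entries above follow a plain poll-until-done pattern: submit a vCenter task, poll its progress, and report the duration once it completes. The sketch below is a minimal stdlib illustration of that pattern only, not the oslo.vmware implementation; FakeTask, the 0.5 s interval and the 60 s timeout are assumptions made for the example.

# Illustrative poll-until-done loop in the spirit of the wait_for_task entries above.
import time


class FakeTask:
    """Stand-in for a vCenter task handle (e.g. 'task-3242300')."""

    def __init__(self, task_id, steps=3):
        self.task_id = task_id
        self._steps = steps
        self._polls = 0

    def poll(self):
        """Return (state, progress); flips to 'success' after a few polls."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)


def wait_for_task(task, interval=0.5, timeout=60.0):
    """Poll until the task reports success, printing progress like the log above."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print("Task: {'id': %s} progress is %d%%." % (task.task_id, progress))
        if state == "success":
            duration = time.monotonic() - start
            print("Task: {'id': %s, 'duration_secs': %.6f} completed successfully."
                  % (task.task_id, duration))
            return
        if time.monotonic() - start > timeout:
            raise TimeoutError("task %s did not complete" % task.task_id)
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-3242300"))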
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.541184] env[69994]: DEBUG nova.network.neutron [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Updating instance_info_cache with network_info: [{"id": "e9053dbb-d79a-4cac-a034-fe85d0ed9832", "address": "fa:16:3e:72:43:00", "network": {"id": "147b280b-2b1f-45b2-afc8-1e6a17471572", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1947184088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689575b37ef346ec99e64166e90e22d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9053dbb-d7", "ovs_interfaceid": "e9053dbb-d79a-4cac-a034-fe85d0ed9832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.722800] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.723268] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-009dec87-3bc0-4d0b-b368-91564dd6cee0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.730914] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1006.730914] env[69994]: value = "task-3242301" [ 1006.730914] env[69994]: _type = "Task" [ 1006.730914] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.738846] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.783619] env[69994]: DEBUG oslo_vmware.api [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242299, 'name': ReconfigVM_Task, 'duration_secs': 0.197826} completed successfully. 
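The instance_info_cache entry above is a list of VIF dicts. A minimal sketch of reading it, using only fields visible in the log (the literal below is abbreviated from the cached entry for port e9053dbb-d79a-4cac-a034-fe85d0ed9832):

# Read port id, MAC, device name, fixed IPs and MTU from a network_info-style list.
network_info = [{
    "id": "e9053dbb-d79a-4cac-a034-fe85d0ed9832",
    "address": "fa:16:3e:72:43:00",
    "network": {
        "id": "147b280b-2b1f-45b2-afc8-1e6a17471572",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950, "physical_network": "default"},
    },
    "type": "ovs",
    "devname": "tape9053dbb-d7",
    "active": True,
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print("port %s mac %s dev %s ips %s mtu %s"
          % (vif["id"], vif["address"], vif["devname"], fixed_ips,
             vif["network"]["meta"]["mtu"]))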
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.783937] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647972', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'name': 'volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef410b09-8686-409e-8391-d50cd0e0df04', 'attached_at': '', 'detached_at': '', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'serial': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1006.812857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0a7c5e75-d6a6-447a-be95-b4c6981b8e1f tempest-ServersAdminTestJSON-1865879220 tempest-ServersAdminTestJSON-1865879220-project-member] Lock "87473dd1-458d-4ef4-a1bd-7e653e509ea4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.057s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.863035] env[69994]: DEBUG nova.network.neutron [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.985235] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43135ddc-1c70-407c-b0b3-4556d499aa30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.993493] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88003cf1-f937-474d-bcbc-ea11c9244758 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.026923] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e30ec4-d7e1-4535-a0a3-75051c1c3457 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.037594] env[69994]: DEBUG oslo_vmware.api [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242300, 'name': PowerOffVM_Task} progress is 0%. 
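The "Attached VMDK" entry above carries the volume connection data as a nested dict. The sketch below, with the dict abbreviated from that entry, shows one way a consumer could pull out the volume id and compose the "[datastore1] volume-<id>/volume-<id>.vmdk" path that the earlier "Reconfigured VM instance ... to attach disk" entry used; the vmdk_path helper and the hard-coded datastore name are assumptions made for the example.

# Consume a vmdk connection dict like the one logged above (abbreviated).
connection = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-647972",
        "volume_id": "aca9e875-f8b1-40fd-8a48-9f448b62ea3b",
        "name": "volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b",
        "access_mode": "rw",
        "encrypted": False,
    },
}


def vmdk_path(datastore, volume_name):
    """Compose the datastore path used when attaching the volume's VMDK."""
    return "[%s] %s/%s.vmdk" % (datastore, volume_name, volume_name)


data = connection["data"]
assert connection["driver_volume_type"] == "vmdk"
print("attaching %s (%s) as %s"
      % (data["volume_id"], data["access_mode"],
         vmdk_path("datastore1", data["name"])))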
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.040792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c69d50-f9cb-42be-9b59-63ed1a501943 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.045053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Releasing lock "refresh_cache-eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.045360] env[69994]: DEBUG nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Instance network_info: |[{"id": "e9053dbb-d79a-4cac-a034-fe85d0ed9832", "address": "fa:16:3e:72:43:00", "network": {"id": "147b280b-2b1f-45b2-afc8-1e6a17471572", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1947184088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689575b37ef346ec99e64166e90e22d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9053dbb-d7", "ovs_interfaceid": "e9053dbb-d79a-4cac-a034-fe85d0ed9832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1007.046009] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:43:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e839c46-1ae9-43b7-9518-8f18f48100dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9053dbb-d79a-4cac-a034-fe85d0ed9832', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.053260] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Creating folder: Project (689575b37ef346ec99e64166e90e22d6). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.053903] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a06089f-e1f7-4566-b6c6-65483e3f6b76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.063981] env[69994]: DEBUG nova.compute.provider_tree [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.073952] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Created folder: Project (689575b37ef346ec99e64166e90e22d6) in parent group-v647729. [ 1007.074164] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Creating folder: Instances. Parent ref: group-v647973. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1007.074385] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24ed88b9-e13b-41fc-9f52-b0360f295180 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.083035] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Created folder: Instances in parent group-v647973. [ 1007.083258] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.083615] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.083615] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d8d4269-2593-4f17-9cd1-e438e6250c41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.102232] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.102232] env[69994]: value = "task-3242304" [ 1007.102232] env[69994]: _type = "Task" [ 1007.102232] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.109641] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242304, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.240758] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.312716] env[69994]: DEBUG nova.compute.manager [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Received event network-changed-e9053dbb-d79a-4cac-a034-fe85d0ed9832 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.312968] env[69994]: DEBUG nova.compute.manager [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Refreshing instance network info cache due to event network-changed-e9053dbb-d79a-4cac-a034-fe85d0ed9832. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1007.313344] env[69994]: DEBUG oslo_concurrency.lockutils [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] Acquiring lock "refresh_cache-eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.313431] env[69994]: DEBUG oslo_concurrency.lockutils [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] Acquired lock "refresh_cache-eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.313533] env[69994]: DEBUG nova.network.neutron [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Refreshing network info cache for port e9053dbb-d79a-4cac-a034-fe85d0ed9832 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.365162] env[69994]: INFO nova.compute.manager [-] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Took 1.02 seconds to deallocate network for instance. [ 1007.541771] env[69994]: DEBUG oslo_vmware.api [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242300, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.567748] env[69994]: DEBUG nova.scheduler.client.report [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.613870] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242304, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.741733] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242301, 'name': PowerOffVM_Task, 'duration_secs': 0.942511} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.741932] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.742202] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.742924] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e64741d-cfff-430b-be87-5293a9527217 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.749519] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.749736] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93bed672-a8ba-4dff-af14-87b64f19ee9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.806652] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.806876] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.807069] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleting the datastore file [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.807335] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82283d8a-b9c1-4b6c-83a9-ad423cf8119f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.818347] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1007.818347] env[69994]: value = "task-3242306" [ 1007.818347] env[69994]: _type = "Task" [ 1007.818347] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.822272] env[69994]: DEBUG nova.objects.instance [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lazy-loading 'flavor' on Instance uuid ef410b09-8686-409e-8391-d50cd0e0df04 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.828068] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242306, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.873506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.040952] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.041230] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquired lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.041424] env[69994]: DEBUG nova.network.neutron [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.042562] env[69994]: DEBUG oslo_vmware.api [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242300, 'name': PowerOffVM_Task, 'duration_secs': 1.153766} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.045199] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.045396] env[69994]: DEBUG nova.compute.manager [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1008.046386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9aa96e-9320-4349-a216-42a149d1822a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.072462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.785s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.074686] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.508s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.074909] env[69994]: DEBUG nova.objects.instance [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lazy-loading 'resources' on Instance uuid 0d42c1c7-2ac1-44f3-8311-929f141e0a65 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.112735] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242304, 'name': CreateVM_Task, 'duration_secs': 0.642007} completed successfully. 
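The lockutils entries above report how long each caller waited for the "compute_resources" lock and how long it was then held (e.g. waited 16.508s, held 1.785s). The sketch below is a stdlib stand-in for that accounting, not the oslo.concurrency implementation; timed_lock and the module-level lock registry are assumptions made for the example.

# Stdlib sketch of "waited N s / held N s" accounting around a named lock.
import threading
import time
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()


@contextmanager
def timed_lock(name):
    """Acquire a named lock, reporting wait and hold times like the log above."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - t0))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - t1))


if __name__ == "__main__":
    with timed_lock("compute_resources"):
        time.sleep(0.1)  # stand-in for resource tracker work done under the lock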
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.112927] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.113723] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.113926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.114436] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1008.114522] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f66bc94-f058-4fbb-af75-73251795ed18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.120156] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1008.120156] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ca7c6d-11f5-4b33-da58-fa040d143322" [ 1008.120156] env[69994]: _type = "Task" [ 1008.120156] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.129307] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ca7c6d-11f5-4b33-da58-fa040d143322, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.151653] env[69994]: DEBUG nova.network.neutron [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Updated VIF entry in instance network info cache for port e9053dbb-d79a-4cac-a034-fe85d0ed9832. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.152054] env[69994]: DEBUG nova.network.neutron [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Updating instance_info_cache with network_info: [{"id": "e9053dbb-d79a-4cac-a034-fe85d0ed9832", "address": "fa:16:3e:72:43:00", "network": {"id": "147b280b-2b1f-45b2-afc8-1e6a17471572", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1947184088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "689575b37ef346ec99e64166e90e22d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9053dbb-d7", "ovs_interfaceid": "e9053dbb-d79a-4cac-a034-fe85d0ed9832", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.331218] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128029} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.331531] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b721679f-e8de-4af6-8902-93ce605c25d7 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.369s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.332292] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.332479] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.332651] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.463819] env[69994]: INFO nova.compute.manager [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Rescuing [ 1008.464128] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.464842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.465053] env[69994]: DEBUG nova.network.neutron [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.561048] env[69994]: DEBUG oslo_concurrency.lockutils [None req-63359145-a7db-48cf-b966-8c322c3dbd88 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.059s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.598846] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-fc7f918b-bc02-4759-8bdd-8945b25214f8 tempest-ServerActionsV293TestJSON-1892589652 tempest-ServerActionsV293TestJSON-1892589652-project-member] Lock "c14851d2-66c5-4865-ae66-abbe303f0c31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.967s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.632288] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ca7c6d-11f5-4b33-da58-fa040d143322, 'name': SearchDatastore_Task, 'duration_secs': 0.01018} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.635215] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.635977] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.636377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.636645] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.636981] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.637814] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dc3c64d-88b4-4c44-8402-546e4be1389f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.647718] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.647718] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.648425] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-313c9a13-0201-4890-9aa1-9a672b8df3bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.655708] env[69994]: DEBUG oslo_concurrency.lockutils [req-b81b5d14-52bf-4828-9ac6-1029116adace req-a7754617-6238-4ff7-a303-e95cfb0e8595 service nova] Releasing lock "refresh_cache-eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.657230] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1008.657230] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524b4bef-742f-bf1f-5c07-fec644656f1f" [ 1008.657230] env[69994]: _type = "Task" [ 1008.657230] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.666939] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524b4bef-742f-bf1f-5c07-fec644656f1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.840399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c587cd88-d1af-45d2-a76a-47e1fec5fe54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.848592] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e931b50-c955-4162-aca7-583440d84c0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.879106] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1382d9-d0ea-441f-9ef6-ef6a6f5f96ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.883449] env[69994]: DEBUG nova.network.neutron [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [{"id": "e4706905-12e3-43b1-a83a-409585a96042", "address": "fa:16:3e:2d:f6:a9", "network": {"id": "ffc05707-d9b8-4f04-8df9-5c384e0942c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1300021035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f035f8fbac46483fb4d70f166df319b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4706905-12", "ovs_interfaceid": "e4706905-12e3-43b1-a83a-409585a96042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.890516] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d044d8-597c-4656-9d62-1c32de27b614 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.906326] env[69994]: DEBUG nova.compute.provider_tree [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.167764] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524b4bef-742f-bf1f-5c07-fec644656f1f, 'name': SearchDatastore_Task, 'duration_secs': 0.00963} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.170753] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04207570-18dd-46be-972f-4c2a7dd3bde2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.176621] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1009.176621] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523062f1-de4c-27e8-9e54-c5802b3692dd" [ 1009.176621] env[69994]: _type = "Task" [ 1009.176621] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.184858] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523062f1-de4c-27e8-9e54-c5802b3692dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.374304] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.374627] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.374716] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.374893] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.375470] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.375680] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.375900] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.376078] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.376857] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.377092] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.377574] env[69994]: DEBUG nova.virt.hardware [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.378694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d212d62e-8980-4af0-99e2-e0b08259b3fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.386837] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Releasing lock "refresh_cache-6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.387798] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5970f13b-6942-40c5-91c0-588eee61c12f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.391348] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74cd09c-410e-49d8-a929-b6c8890d0697 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.398415] 
env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Resuming the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1009.406147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aab095b7-9ce0-43df-ac8f-dc3378eba582 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.407942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:80:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd8d4128-2422-4fb9-989c-0ceb2eb2123a', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1009.416013] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.416885] env[69994]: DEBUG nova.scheduler.client.report [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.420113] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1009.423576] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a810088b-bcaa-4281-8724-8044d8a2de33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.442377] env[69994]: DEBUG oslo_vmware.api [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 1009.442377] env[69994]: value = "task-3242307" [ 1009.442377] env[69994]: _type = "Task" [ 1009.442377] env[69994]: } to complete. 
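The inventory dict reported above for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 determines allocatable capacity per resource class as (total - reserved) * allocation_ratio. A short arithmetic sketch using the logged values:

# Effective capacity from the inventory reported in the scheduler report above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print("%s: %.0f allocatable" % (rc, capacity))
# Expected: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400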
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.448029] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1009.448029] env[69994]: value = "task-3242308" [ 1009.448029] env[69994]: _type = "Task" [ 1009.448029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.456860] env[69994]: DEBUG oslo_vmware.api [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242307, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.460043] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242308, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.562597] env[69994]: DEBUG nova.network.neutron [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.689828] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523062f1-de4c-27e8-9e54-c5802b3692dd, 'name': SearchDatastore_Task, 'duration_secs': 0.009673} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.690301] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.691010] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] eed22b8d-f8ea-4b90-8730-61d9a89ddfaa/eed22b8d-f8ea-4b90-8730-61d9a89ddfaa.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.691313] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-325fdbab-1901-4faa-b1ac-d77e5c5e7be0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.698072] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1009.698072] env[69994]: value = "task-3242309" [ 1009.698072] env[69994]: _type = "Task" [ 1009.698072] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.706086] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242309, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.925293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.850s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.928134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.145s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.928416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.928884] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1009.928884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.944s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.931521] env[69994]: INFO nova.compute.claims [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.935525] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1aecbe-9abf-45cf-baaf-5eb45f813e37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.950933] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff7d840-bcb7-4fc6-8978-088e5de5a03d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.959638] env[69994]: DEBUG oslo_vmware.api [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242307, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.970296] env[69994]: INFO nova.scheduler.client.report [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Deleted allocations for instance 0d42c1c7-2ac1-44f3-8311-929f141e0a65 [ 1009.972285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aedda33-92a3-40e9-b7ea-050ce750aa0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.981669] env[69994]: DEBUG nova.compute.manager [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1009.982059] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242308, 'name': CreateVM_Task, 'duration_secs': 0.368908} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.983488] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4039b9b-b4b0-4908-a095-80b737fbcffd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.986384] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1009.989210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.989389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.990053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1009.990982] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcd301a6-7df8-407d-80a6-282c07c018ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.993257] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c65d12-a15f-4611-a5e5-b8fc50618a4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.038494] env[69994]: DEBUG 
nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179585MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1010.038494] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.040221] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1010.040221] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52350c02-6208-e448-b66d-4e6a8e32f169" [ 1010.040221] env[69994]: _type = "Task" [ 1010.040221] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.049535] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52350c02-6208-e448-b66d-4e6a8e32f169, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.065522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.210035] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483376} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.210360] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] eed22b8d-f8ea-4b90-8730-61d9a89ddfaa/eed22b8d-f8ea-4b90-8730-61d9a89ddfaa.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.210579] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.210870] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d471dcb-6f63-4ec9-8ded-1588fdd53677 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.220094] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1010.220094] env[69994]: value = "task-3242310" [ 1010.220094] env[69994]: _type = "Task" [ 1010.220094] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.230753] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242310, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.456843] env[69994]: DEBUG oslo_vmware.api [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242307, 'name': PowerOnVM_Task, 'duration_secs': 0.761589} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.456843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Resumed the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1010.456843] env[69994]: DEBUG nova.compute.manager [None req-d36a9ed6-c125-4abd-a277-cfa1c7d8abb6 tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.456843] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dd4434-ca29-4257-b60d-9734df412480 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.487611] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796ba68f-0266-4631-8f8f-df1518697467 tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "0d42c1c7-2ac1-44f3-8311-929f141e0a65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.983s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.506284] env[69994]: INFO nova.compute.manager [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] instance snapshotting [ 1010.506487] env[69994]: WARNING nova.compute.manager [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1010.512174] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f85c99-bb06-4887-ad60-e8d4f019c575 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.544670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dcaa1e-5dde-47ee-85d7-81c649dd6e19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.554689] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52350c02-6208-e448-b66d-4e6a8e32f169, 'name': SearchDatastore_Task, 'duration_secs': 0.044593} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.557057] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.557360] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1010.557589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.557831] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.558171] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.561063] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94b20b64-8554-472f-806a-e9589540c277 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.569568] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.569776] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1010.570587] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8653dccd-798c-4007-816c-58bff7e3c469 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.576383] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1010.576383] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52cf234d-bf9d-d924-0583-87ab6c141e90" [ 1010.576383] env[69994]: _type = "Task" [ 1010.576383] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.584525] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cf234d-bf9d-d924-0583-87ab6c141e90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.729618] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242310, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071459} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.729893] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.730726] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fe9857-ab42-4e3d-ab96-7fd6818e56f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.757332] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] eed22b8d-f8ea-4b90-8730-61d9a89ddfaa/eed22b8d-f8ea-4b90-8730-61d9a89ddfaa.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.757332] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53ccf411-78ea-4a2e-9546-36fabb60d8a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.777553] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1010.777553] env[69994]: value = "task-3242311" [ 
1010.777553] env[69994]: _type = "Task" [ 1010.777553] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.785772] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.065015] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1011.065335] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4801eed9-5fb9-4fb5-a22b-f010e735acb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.074399] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1011.074399] env[69994]: value = "task-3242312" [ 1011.074399] env[69994]: _type = "Task" [ 1011.074399] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.086911] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242312, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.091557] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52cf234d-bf9d-d924-0583-87ab6c141e90, 'name': SearchDatastore_Task, 'duration_secs': 0.019475} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.092381] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f95eab5-8e0e-4b9b-8b35-e72095ec3dc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.100390] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1011.100390] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c79bfc-c24b-5bb6-98fc-c3ccd81c62d4" [ 1011.100390] env[69994]: _type = "Task" [ 1011.100390] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.107932] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c79bfc-c24b-5bb6-98fc-c3ccd81c62d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.183439] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de833dd-908a-4ece-8835-3026329d6d20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.191812] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d369d907-8e6c-48d5-b412-0e3de05a5bb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.224240] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e835d8b-0927-46e0-8367-99b9040bec8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.231766] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f420398e-4803-4409-a412-dff75ecd697e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.246585] env[69994]: DEBUG nova.compute.provider_tree [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.287048] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242311, 'name': ReconfigVM_Task, 'duration_secs': 0.290076} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.287821] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Reconfigured VM instance instance-00000053 to attach disk [datastore1] eed22b8d-f8ea-4b90-8730-61d9a89ddfaa/eed22b8d-f8ea-4b90-8730-61d9a89ddfaa.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.288629] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5564778f-a52b-4386-a86a-7048a9ef969f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.294726] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1011.294726] env[69994]: value = "task-3242313" [ 1011.294726] env[69994]: _type = "Task" [ 1011.294726] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.302781] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242313, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.586887] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242312, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.609339] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c79bfc-c24b-5bb6-98fc-c3ccd81c62d4, 'name': SearchDatastore_Task, 'duration_secs': 0.010121} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.609445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.609692] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1011.610061] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59444b97-5a6a-463b-b2a0-a5d6c080ccc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.614882] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.616252] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd86f143-4818-4b0d-94cd-736536fff473 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.617906] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1011.617906] env[69994]: value = "task-3242314" [ 1011.617906] env[69994]: _type = "Task" [ 1011.617906] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.622831] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1011.622831] env[69994]: value = "task-3242315" [ 1011.622831] env[69994]: _type = "Task" [ 1011.622831] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.629382] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242314, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.634829] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242315, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.750730] env[69994]: DEBUG nova.scheduler.client.report [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.806200] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242313, 'name': Rename_Task, 'duration_secs': 0.155596} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.806351] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.806676] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8a1d46d-2fec-438c-8f28-387b07f57f83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.812691] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1011.812691] env[69994]: value = "task-3242316" [ 1011.812691] env[69994]: _type = "Task" [ 1011.812691] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.820586] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242316, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.088354] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242312, 'name': CreateSnapshot_Task, 'duration_secs': 0.529324} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.088804] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1012.089675] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6758f990-779c-458e-8bb6-a05e73f46cdb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.131466] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242314, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.137672] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242315, 'name': PowerOffVM_Task, 'duration_secs': 0.219888} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.138091] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.138992] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04974685-e189-4dee-8b9a-645efaf2ff6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.163857] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1dab2b-7949-4267-97ee-e1c154987b73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.202739] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.203028] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02f78ca2-f240-4be7-8a0a-c48f7499bf8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.209471] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1012.209471] env[69994]: value = "task-3242317" [ 1012.209471] env[69994]: _type = "Task" [ 1012.209471] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.218961] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.255582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.256165] env[69994]: DEBUG nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1012.260034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.549s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.260034] env[69994]: DEBUG nova.objects.instance [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1012.326014] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242316, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.614419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1012.614834] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2d2b2434-4e19-42ac-a0cf-2549bcf75b5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.632753] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6009} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.632832] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1012.632832] env[69994]: value = "task-3242318" [ 1012.632832] env[69994]: _type = "Task" [ 1012.632832] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.633083] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1012.633328] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1012.633644] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ea71da8-e14d-4ff8-b04f-80a428454ce3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.644767] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242318, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.646395] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1012.646395] env[69994]: value = "task-3242319" [ 1012.646395] env[69994]: _type = "Task" [ 1012.646395] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.656327] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242319, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.721531] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1012.721868] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.722218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.722397] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.722582] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.724390] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b43aa54c-0112-43d7-a8e4-ba075d26f197 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.742934] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.743271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.744141] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d026239c-79b5-44e8-98ee-a1437a8247c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.751207] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1012.751207] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5266b0df-796d-12f8-51ee-313ea76549dc" [ 1012.751207] env[69994]: _type = "Task" [ 1012.751207] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.760735] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5266b0df-796d-12f8-51ee-313ea76549dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.764890] env[69994]: DEBUG nova.compute.utils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1012.772015] env[69994]: DEBUG nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1012.772015] env[69994]: DEBUG nova.network.neutron [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.824009] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242316, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.844704] env[69994]: DEBUG nova.policy [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46288c5788b14d0fa0f5c663f1b6467d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdf74d74267246d4b3de13f90b277f12', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1013.148901] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242318, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.155710] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073491} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.155978] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.160384] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4fc8f2-3ef2-4983-82d0-fe3c4c56a331 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.181775] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.182133] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6c2aa95-b1c9-484f-8c2c-79839b858719 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.203559] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1013.203559] env[69994]: value = "task-3242320" [ 1013.203559] env[69994]: _type = "Task" [ 1013.203559] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.212371] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242320, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.247946] env[69994]: DEBUG nova.network.neutron [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Successfully created port: 144ed90e-dece-4cae-a85e-25e46c84dd0b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.261236] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5266b0df-796d-12f8-51ee-313ea76549dc, 'name': SearchDatastore_Task, 'duration_secs': 0.021084} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.262030] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e08f66c-82e0-4d25-9f1e-a2eaf86c3a5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.269109] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1013.269109] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526ad82e-95d8-9c96-68c3-adb5570b57b2" [ 1013.269109] env[69994]: _type = "Task" [ 1013.269109] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.273087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-869a4e1a-e5f4-4515-bb2f-486d4a58e247 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.277973] env[69994]: DEBUG nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1013.277973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.221s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.279313] env[69994]: INFO nova.compute.claims [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.287363] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526ad82e-95d8-9c96-68c3-adb5570b57b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.326294] env[69994]: DEBUG oslo_vmware.api [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242316, 'name': PowerOnVM_Task, 'duration_secs': 1.092136} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.326564] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.326818] env[69994]: INFO nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Took 8.24 seconds to spawn the instance on the hypervisor. 
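The "Acquiring lock / acquired / released" lines and the repeated "Task: ... progress is N%" lines above come from two library patterns: oslo.concurrency's lockutils bookkeeping and oslo.vmware's task polling. A minimal sketch of both, assuming an already-created oslo.vmware VMwareAPISession named session and a vCenter task reference task_ref (both placeholders, not Nova's actual code), looks roughly like this:

    # Sketch only: the two patterns behind the lock and task-poll log lines.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance):
        # While this body runs, lockutils emits the "acquired ... waited Xs"
        # and "released ... held Ys" messages seen in the log above.
        pass

    def wait(session, task_ref):
        # wait_for_task polls the task periodically (the "progress is N%"
        # lines) and returns its result once vCenter reports completion,
        # raising if the task ends in an error state.
        return session.wait_for_task(task_ref)
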
[ 1013.327172] env[69994]: DEBUG nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1013.327786] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3dd1971-2964-43c6-9e7b-55332a083494 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.432038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "81bae584-e558-4f96-9696-2510fed5a2e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.432396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "81bae584-e558-4f96-9696-2510fed5a2e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.647162] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242318, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.714711] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242320, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.793855] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526ad82e-95d8-9c96-68c3-adb5570b57b2, 'name': SearchDatastore_Task, 'duration_secs': 0.018293} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.797019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.797019] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. {{(pid=69994) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1013.797019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-656587a6-168c-4d16-8a89-05c6b4e245ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.807363] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1013.807363] env[69994]: value = "task-3242321" [ 1013.807363] env[69994]: _type = "Task" [ 1013.807363] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.817735] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242321, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.848856] env[69994]: INFO nova.compute.manager [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Took 35.70 seconds to build instance. [ 1013.938151] env[69994]: DEBUG nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1014.149530] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242318, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.226111] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242320, 'name': ReconfigVM_Task, 'duration_secs': 0.699703} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.226111] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb/8f5a5852-cd78-434f-b413-3cc2314575bb.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.226111] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f5cfed8-ba0a-4ee9-bba3-84093f5a09e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.235050] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1014.235050] env[69994]: value = "task-3242322" [ 1014.235050] env[69994]: _type = "Task" [ 1014.235050] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.244407] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242322, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.302498] env[69994]: DEBUG nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1014.319193] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506501} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.319549] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk. 
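The rescue-disk preparation above follows the usual vmwareapi flow: copy the cached image VMDK to the instance directory with a CopyVirtualDisk_Task, then attach it via ReconfigVM_Task. A rough sketch of the copy step, assuming an oslo.vmware VMwareAPISession session, a datacenter moref dc_ref, and datastore paths as plain strings (all placeholders):

    # Sketch of a ds_util.disk_copy-style call: CopyVirtualDisk_Task on the
    # VirtualDiskManager, then block until vCenter reports completion.
    def copy_cached_image(session, dc_ref, src_path, dst_path):
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=src_path, sourceDatacenter=dc_ref,
            destName=dst_path, destDatacenter=dc_ref)
        session.wait_for_task(task)

The subsequent ReconfigVM_Task entries correspond to attaching the copied -rescue.vmdk to the existing VM.
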
[ 1014.320422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39e6eae-bf31-4f2f-9cf6-dbc716591ecd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.352564] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.355800] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec47b10b-f248-4e84-bb1d-054132559a7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.370304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1afdfd79-89bc-4a07-bec7-5e88d03b5a2d tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.224s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.372703] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1014.372956] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1014.373157] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1014.373368] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1014.373517] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1014.373667] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1014.373876] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1014.374046] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1014.374218] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1014.374379] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1014.374548] env[69994]: DEBUG nova.virt.hardware [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1014.375721] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c13f78-b16c-470d-857f-27b8a085cb19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.384582] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8930558a-14b9-45d0-abc8-ddca92f1568d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.389274] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1014.389274] env[69994]: value = "task-3242323" [ 1014.389274] env[69994]: _type = "Task" [ 1014.389274] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.413121] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.460396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.611061] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a730f8-d492-47e3-a0a5-6fa7e5e74b1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.617989] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc21f1f2-393e-4650-830c-8f1d893df7c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.652726] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d5c2c9-afeb-4005-9362-c72dbfc7aea5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.664806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c756b745-9ec2-4f7f-a553-2bb78aaa6715 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.667759] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242318, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.679647] env[69994]: DEBUG nova.compute.provider_tree [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.745582] env[69994]: DEBUG nova.compute.manager [req-f2430cbf-9f11-4c13-ba56-b9e8f1c0427a req-d760b445-7261-459b-8a26-61a5785e78b3 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Received event network-vif-plugged-144ed90e-dece-4cae-a85e-25e46c84dd0b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.745802] env[69994]: DEBUG oslo_concurrency.lockutils [req-f2430cbf-9f11-4c13-ba56-b9e8f1c0427a req-d760b445-7261-459b-8a26-61a5785e78b3 service nova] Acquiring lock "95b7d534-ac5b-4982-830d-bf65ecd610b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.746558] env[69994]: DEBUG oslo_concurrency.lockutils [req-f2430cbf-9f11-4c13-ba56-b9e8f1c0427a req-d760b445-7261-459b-8a26-61a5785e78b3 service nova] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.746558] env[69994]: DEBUG oslo_concurrency.lockutils [req-f2430cbf-9f11-4c13-ba56-b9e8f1c0427a req-d760b445-7261-459b-8a26-61a5785e78b3 service nova] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.746558] env[69994]: DEBUG nova.compute.manager [req-f2430cbf-9f11-4c13-ba56-b9e8f1c0427a req-d760b445-7261-459b-8a26-61a5785e78b3 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] No waiting events found dispatching network-vif-plugged-144ed90e-dece-4cae-a85e-25e46c84dd0b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1014.746702] env[69994]: WARNING nova.compute.manager [req-f2430cbf-9f11-4c13-ba56-b9e8f1c0427a req-d760b445-7261-459b-8a26-61a5785e78b3 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Received unexpected event network-vif-plugged-144ed90e-dece-4cae-a85e-25e46c84dd0b for instance with vm_state building and task_state spawning. [ 1014.754080] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242322, 'name': Rename_Task, 'duration_secs': 0.227481} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.754576] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1014.754842] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5162898b-bcb5-44c6-b98e-e7891b0cbc26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.760466] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1014.760466] env[69994]: value = "task-3242324" [ 1014.760466] env[69994]: _type = "Task" [ 1014.760466] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.768941] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.800905] env[69994]: DEBUG nova.network.neutron [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Successfully updated port: 144ed90e-dece-4cae-a85e-25e46c84dd0b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.900739] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.159397] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242318, 'name': CloneVM_Task, 'duration_secs': 2.090173} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.159664] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Created linked-clone VM from snapshot [ 1015.160453] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed56b50-8673-47e3-8bf0-164bc1e451eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.167628] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Uploading image b3182bd5-a318-4328-8617-30fb9c61ad32 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1015.183840] env[69994]: DEBUG nova.scheduler.client.report [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1015.193688] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1015.193688] env[69994]: value = "vm-647978" [ 1015.193688] env[69994]: _type = "VirtualMachine" [ 1015.193688] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1015.193931] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0b739a1d-eeb9-4d25-b0ec-931624c7eee9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.201877] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease: (returnval){ [ 1015.201877] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526ef5b7-cd80-c269-cc1d-50caf0b627c0" [ 1015.201877] env[69994]: _type = "HttpNfcLease" [ 1015.201877] env[69994]: } obtained for exporting VM: (result){ [ 1015.201877] env[69994]: value = "vm-647978" [ 1015.201877] env[69994]: _type = "VirtualMachine" [ 1015.201877] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1015.201877] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the lease: (returnval){ [ 1015.201877] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526ef5b7-cd80-c269-cc1d-50caf0b627c0" [ 1015.201877] env[69994]: _type = "HttpNfcLease" [ 1015.201877] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1015.212617] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1015.212617] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526ef5b7-cd80-c269-cc1d-50caf0b627c0" [ 1015.212617] env[69994]: _type = "HttpNfcLease" [ 1015.212617] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1015.272324] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242324, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.301719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "refresh_cache-95b7d534-ac5b-4982-830d-bf65ecd610b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.301884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquired lock "refresh_cache-95b7d534-ac5b-4982-830d-bf65ecd610b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.302063] env[69994]: DEBUG nova.network.neutron [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.375378] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.375687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.375902] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.376126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.376303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.378909] env[69994]: INFO nova.compute.manager [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Terminating instance [ 1015.403566] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242323, 'name': ReconfigVM_Task, 'duration_secs': 0.867003} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.403566] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfigured VM instance instance-00000040 to attach disk [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04/cc2e14cc-b12f-480a-a387-dd21e9efda8b-rescue.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.403798] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9978bae9-f84a-4310-920c-cc8952b22dd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.436105] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83a00863-3b1c-46c0-843b-ee1855a97f9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.454841] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1015.454841] env[69994]: value = "task-3242326" [ 1015.454841] env[69994]: _type = "Task" [ 1015.454841] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.466339] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242326, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.690634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.691444] env[69994]: DEBUG nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1015.694905] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.822s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.695144] env[69994]: DEBUG nova.objects.instance [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lazy-loading 'resources' on Instance uuid 095e75b1-7806-4d1d-ab9e-49735f7aa0f3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.712974] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1015.712974] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526ef5b7-cd80-c269-cc1d-50caf0b627c0" [ 1015.712974] env[69994]: _type = "HttpNfcLease" [ 1015.712974] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1015.713288] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1015.713288] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526ef5b7-cd80-c269-cc1d-50caf0b627c0" [ 1015.713288] env[69994]: _type = "HttpNfcLease" [ 1015.713288] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1015.714927] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48d4bae-c0ba-42f9-9e3f-47fae23c3ab6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.724293] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521942bd-ee16-36a0-dafe-c7a607f0ed46/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1015.724293] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521942bd-ee16-36a0-dafe-c7a607f0ed46/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1015.807999] env[69994]: DEBUG oslo_vmware.api [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242324, 'name': PowerOnVM_Task, 'duration_secs': 0.903972} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.807999] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1015.807999] env[69994]: DEBUG nova.compute.manager [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.808945] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caee960b-9b3e-409b-8c5b-18060cd8156d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.836596] env[69994]: DEBUG nova.network.neutron [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.882213] env[69994]: DEBUG nova.compute.manager [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1015.882419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.883276] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cc2aa9-899d-431c-8ffe-16b6c815f5e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.890748] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1015.891164] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6149b36-3ac5-4889-81b9-582b5bc4e347 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.897191] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1015.897191] env[69994]: value = "task-3242327" [ 1015.897191] env[69994]: _type = "Task" [ 1015.897191] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.905294] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242327, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.935997] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9e9668ee-9433-4d48-b421-d0b844db769c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.968933] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242326, 'name': ReconfigVM_Task, 'duration_secs': 0.252091} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.974685] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.975250] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d99fa86-19a7-4af7-b776-0409ea67dc04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.982488] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1015.982488] env[69994]: value = "task-3242328" [ 1015.982488] env[69994]: _type = "Task" [ 1015.982488] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.986304] env[69994]: DEBUG nova.network.neutron [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Updating instance_info_cache with network_info: [{"id": "144ed90e-dece-4cae-a85e-25e46c84dd0b", "address": "fa:16:3e:01:54:43", "network": {"id": "e85b49ba-5d2d-421f-a86b-216d1d8b2b4c", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-983214706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdf74d74267246d4b3de13f90b277f12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap144ed90e-de", "ovs_interfaceid": "144ed90e-dece-4cae-a85e-25e46c84dd0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.990290] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242328, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.119782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.120063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.148488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.148646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.184813] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "799bf051-86b4-45bd-b9bf-df767074dac8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.185136] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "799bf051-86b4-45bd-b9bf-df767074dac8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.197573] env[69994]: DEBUG nova.compute.utils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1016.199386] env[69994]: DEBUG nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Allocating 
IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1016.199620] env[69994]: DEBUG nova.network.neutron [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1016.242387] env[69994]: DEBUG nova.policy [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb1874902bc24959b717674a99e530a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee188ea80c9847188df8b8482b7c6ec7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1016.328617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.409756] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242327, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.493424] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Releasing lock "refresh_cache-95b7d534-ac5b-4982-830d-bf65ecd610b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.493846] env[69994]: DEBUG nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Instance network_info: |[{"id": "144ed90e-dece-4cae-a85e-25e46c84dd0b", "address": "fa:16:3e:01:54:43", "network": {"id": "e85b49ba-5d2d-421f-a86b-216d1d8b2b4c", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-983214706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdf74d74267246d4b3de13f90b277f12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap144ed90e-de", "ovs_interfaceid": "144ed90e-dece-4cae-a85e-25e46c84dd0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1016.495039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:54:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8059554c-499f-44b4-be06-29f80ec36b34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '144ed90e-dece-4cae-a85e-25e46c84dd0b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.503273] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Creating folder: Project (bdf74d74267246d4b3de13f90b277f12). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.509368] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bfe620e-4813-48d7-ad45-fbdf9560fa5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.509368] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242328, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.523113] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Created folder: Project (bdf74d74267246d4b3de13f90b277f12) in parent group-v647729. [ 1016.523449] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Creating folder: Instances. Parent ref: group-v647979. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.523701] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa32e62e-9e42-4b0f-92e0-ab9df0d3fa60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.532264] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6b5149-359b-4007-a922-6571546a3ac4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.537352] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Created folder: Instances in parent group-v647979. [ 1016.537649] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.538303] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.538586] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dca0356c-c0a8-4f6c-a474-b7a8fe696ac8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.560086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e561697b-1d17-4968-87a7-2b856989953f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.566378] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.566378] env[69994]: value = "task-3242331" [ 1016.566378] env[69994]: _type = "Task" [ 1016.566378] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.604255] env[69994]: DEBUG nova.network.neutron [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Successfully created port: 8c33cc1e-6e3c-4b24-b456-71e80fc23840 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1016.607154] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990fadee-fd15-4f62-995b-c222008370b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.614905] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242331, 'name': CreateVM_Task} progress is 15%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.620340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1d9176-e0ef-40ae-aa28-004db5b37280 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.624680] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1016.638163] env[69994]: DEBUG nova.compute.provider_tree [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.653738] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1016.687710] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1016.702499] env[69994]: DEBUG nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1016.876199] env[69994]: DEBUG nova.compute.manager [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Received event network-changed-144ed90e-dece-4cae-a85e-25e46c84dd0b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1016.876199] env[69994]: DEBUG nova.compute.manager [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Refreshing instance network info cache due to event network-changed-144ed90e-dece-4cae-a85e-25e46c84dd0b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1016.876491] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] Acquiring lock "refresh_cache-95b7d534-ac5b-4982-830d-bf65ecd610b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.876491] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] Acquired lock "refresh_cache-95b7d534-ac5b-4982-830d-bf65ecd610b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.876711] env[69994]: DEBUG nova.network.neutron [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Refreshing network info cache for port 144ed90e-dece-4cae-a85e-25e46c84dd0b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.910184] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242327, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.994632] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242328, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.077193] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242331, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.141045] env[69994]: DEBUG nova.scheduler.client.report [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.153465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.178384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.211584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.411280] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242327, 'name': PowerOffVM_Task, 'duration_secs': 1.053662} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.411643] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.412429] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.412690] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2feb72fb-e55b-4901-a915-e852c302881e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.476640] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.476894] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.477132] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Deleting the datastore file [datastore1] eed22b8d-f8ea-4b90-8730-61d9a89ddfaa {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.477360] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a47c3f82-c248-41d1-8051-f9a39c5b6f1c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.484319] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for the task: (returnval){ [ 1017.484319] env[69994]: value = "task-3242333" [ 1017.484319] env[69994]: _type = "Task" [ 1017.484319] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.509436] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242333, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.515415] env[69994]: DEBUG oslo_vmware.api [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242328, 'name': PowerOnVM_Task, 'duration_secs': 1.183766} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.518491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.522553] env[69994]: DEBUG nova.compute.manager [None req-3897853a-00af-491a-8ca1-d51c78b90eae tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.523603] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5598c139-ed87-4ad1-8d67-b1e96e558ad4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.567898] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "8f5a5852-cd78-434f-b413-3cc2314575bb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.568229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.568479] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "8f5a5852-cd78-434f-b413-3cc2314575bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.572227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.572227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.577824] env[69994]: INFO nova.compute.manager [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Terminating instance [ 1017.585662] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242331, 'name': CreateVM_Task, 'duration_secs': 0.603411} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.586343] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.586632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.586906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.587293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.588027] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a488f0a-c1dc-4c44-8f53-dad139baebb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.598499] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1017.598499] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52623ea9-5a4c-e171-25a8-9cd7a68abb09" [ 1017.598499] env[69994]: _type = "Task" [ 1017.598499] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.607021] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52623ea9-5a4c-e171-25a8-9cd7a68abb09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.647157] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.951s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.648927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.611s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.660529] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.660804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.661017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.661208] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.661377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.664632] env[69994]: INFO nova.compute.manager [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Terminating instance [ 1017.684734] env[69994]: INFO nova.scheduler.client.report [None 
req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Deleted allocations for instance 095e75b1-7806-4d1d-ab9e-49735f7aa0f3 [ 1017.717367] env[69994]: DEBUG nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1017.764195] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1017.765106] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1017.765106] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1017.765106] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1017.765106] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1017.765106] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1017.765509] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1017.765509] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1017.765764] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1017.765836] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1017.765951] env[69994]: DEBUG nova.virt.hardware [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1017.767315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593e7300-6a67-49e1-8258-ef2c13c896f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.777291] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc623ba-08f4-4f7e-a3bd-ad778aba64ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.783364] env[69994]: DEBUG nova.network.neutron [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Updated VIF entry in instance network info cache for port 144ed90e-dece-4cae-a85e-25e46c84dd0b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.783758] env[69994]: DEBUG nova.network.neutron [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Updating instance_info_cache with network_info: [{"id": "144ed90e-dece-4cae-a85e-25e46c84dd0b", "address": "fa:16:3e:01:54:43", "network": {"id": "e85b49ba-5d2d-421f-a86b-216d1d8b2b4c", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-983214706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdf74d74267246d4b3de13f90b277f12", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap144ed90e-de", "ovs_interfaceid": "144ed90e-dece-4cae-a85e-25e46c84dd0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.997624] env[69994]: DEBUG oslo_vmware.api [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Task: {'id': task-3242333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294626} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.998044] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.998335] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.998626] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.998910] env[69994]: INFO nova.compute.manager [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Took 2.12 seconds to destroy the instance on the hypervisor. 
[ 1017.999307] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1018.000752] env[69994]: DEBUG nova.compute.manager [-] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1018.001015] env[69994]: DEBUG nova.network.neutron [-] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1018.082673] env[69994]: DEBUG nova.compute.manager [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1018.083695] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1018.084633] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a5c669-7a95-4bd6-9c52-a9a06e027b1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.092823] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.093125] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27fa0f8a-9bbb-4c77-8fd9-7f7eaad8bd73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.099445] env[69994]: DEBUG oslo_vmware.api [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1018.099445] env[69994]: value = "task-3242334" [ 1018.099445] env[69994]: _type = "Task" [ 1018.099445] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.111894] env[69994]: DEBUG oslo_vmware.api [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242334, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.115903] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52623ea9-5a4c-e171-25a8-9cd7a68abb09, 'name': SearchDatastore_Task, 'duration_secs': 0.015348} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.116202] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.116447] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.116685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.116832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.117020] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.117303] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48aa91ab-c8bb-4241-8389-c01d3fb253c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.126734] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.127040] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1018.127834] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93314c11-43d1-435b-b656-c040bf377db9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.133340] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1018.133340] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529ac2ac-68e5-1230-03fa-da4f2902436b" [ 1018.133340] env[69994]: _type = "Task" [ 1018.133340] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.142236] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529ac2ac-68e5-1230-03fa-da4f2902436b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.171990] env[69994]: DEBUG nova.compute.manager [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1018.171990] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1018.172651] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12d7919-e4f3-4a63-84fc-1bdaf3728190 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.181896] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.182185] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b52d41b-d514-41fb-a6a2-3d35b1fb11ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.189056] env[69994]: DEBUG nova.network.neutron [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Successfully updated port: 8c33cc1e-6e3c-4b24-b456-71e80fc23840 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1018.190236] env[69994]: DEBUG oslo_vmware.api [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 
tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 1018.190236] env[69994]: value = "task-3242335" [ 1018.190236] env[69994]: _type = "Task" [ 1018.190236] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.201880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7125dfc4-dc4a-429e-ae7a-f4a25c003667 tempest-ServerShowV254Test-1196559966 tempest-ServerShowV254Test-1196559966-project-member] Lock "095e75b1-7806-4d1d-ab9e-49735f7aa0f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.886s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.211027] env[69994]: DEBUG oslo_vmware.api [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242335, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.214201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "ab99499b-21a2-465b-9975-4e0adb18df94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.214462] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.267365] env[69994]: DEBUG nova.compute.manager [req-f53570bb-988f-49dd-b119-9080330bf29b req-d420f047-9b36-4db2-9776-21d4f49c6277 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Received event network-vif-deleted-e9053dbb-d79a-4cac-a034-fe85d0ed9832 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1018.267642] env[69994]: INFO nova.compute.manager [req-f53570bb-988f-49dd-b119-9080330bf29b req-d420f047-9b36-4db2-9776-21d4f49c6277 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Neutron deleted interface e9053dbb-d79a-4cac-a034-fe85d0ed9832; detaching it from the instance and deleting it from the info cache [ 1018.267857] env[69994]: DEBUG nova.network.neutron [req-f53570bb-988f-49dd-b119-9080330bf29b req-d420f047-9b36-4db2-9776-21d4f49c6277 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.287412] env[69994]: DEBUG oslo_concurrency.lockutils [req-e7f2c492-f9ff-498d-8713-825f6b166191 req-48717e7f-438b-4acf-bc99-3eddc9385fe6 service nova] Releasing lock "refresh_cache-95b7d534-ac5b-4982-830d-bf65ecd610b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.612691] env[69994]: DEBUG oslo_vmware.api [None 
req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242334, 'name': PowerOffVM_Task, 'duration_secs': 0.256712} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.612973] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.613162] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1018.613415] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4013ddb-1e2a-456d-91a3-e5cdc164f3b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.643059] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529ac2ac-68e5-1230-03fa-da4f2902436b, 'name': SearchDatastore_Task, 'duration_secs': 0.014216} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.643884] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9c42888-6a73-4c35-8178-999d408c7a6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.648836] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1018.648836] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52289989-b461-bf9a-3377-7e75aa4d5cb0" [ 1018.648836] env[69994]: _type = "Task" [ 1018.648836] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.656352] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52289989-b461-bf9a-3377-7e75aa4d5cb0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.677015] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1018.677124] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1018.677589] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleting the datastore file [datastore1] 8f5a5852-cd78-434f-b413-3cc2314575bb {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1018.677694] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-deaabf7c-551f-4a7f-8ba0-17e9d0d0093e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.681812] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ed662f67-be0e-4f19-bb8a-6af39b4d348c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.682012] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance b99b73e6-3348-4d5d-aa57-f01ace0bfc42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.682197] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eff21ec5-a51d-4004-9edf-1891f706fe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.682383] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef410b09-8686-409e-8391-d50cd0e0df04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.682589] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.682788] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 29071eb9-6334-4c23-acb4-142c12aa448d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.682980] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 850930f9-d5fb-4546-9796-30e164a1cdd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.683173] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e17fcc84-7c86-41b6-88ec-8a35619534b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.683354] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.683532] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 290e8749-6860-4303-b966-65d2efee5499 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.683700] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 8f5a5852-cd78-434f-b413-3cc2314575bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.683861] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eed22b8d-f8ea-4b90-8730-61d9a89ddfaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.684032] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 95b7d534-ac5b-4982-830d-bf65ecd610b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.684196] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 47e80abc-2f7e-432c-bd2f-3064841401fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1018.688572] env[69994]: DEBUG oslo_vmware.api [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1018.688572] env[69994]: value = "task-3242337" [ 1018.688572] env[69994]: _type = "Task" [ 1018.688572] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.693486] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-47e80abc-2f7e-432c-bd2f-3064841401fc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.693719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-47e80abc-2f7e-432c-bd2f-3064841401fc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.693895] env[69994]: DEBUG nova.network.neutron [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1018.706099] env[69994]: DEBUG oslo_vmware.api [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242337, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.709045] env[69994]: DEBUG oslo_vmware.api [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242335, 'name': PowerOffVM_Task, 'duration_secs': 0.204848} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.709521] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.709707] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1018.709959] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60a389d0-b71e-4b1d-b27e-319555d63bbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.716640] env[69994]: DEBUG nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1018.747891] env[69994]: DEBUG nova.network.neutron [-] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.770905] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1018.771524] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1018.771524] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleting the datastore file [datastore2] 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1018.771669] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6fe7329f-8a99-49f6-bb5e-f87b9c147e51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.773612] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b1e7a00-9256-4a9b-8994-bf7462129afd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.783794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfe7d96-ae74-4be5-a7bd-c1337c9cee5a {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.795700] env[69994]: DEBUG oslo_vmware.api [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for the task: (returnval){ [ 1018.795700] env[69994]: value = "task-3242339" [ 1018.795700] env[69994]: _type = "Task" [ 1018.795700] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.805284] env[69994]: DEBUG oslo_vmware.api [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242339, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.819892] env[69994]: DEBUG nova.compute.manager [req-f53570bb-988f-49dd-b119-9080330bf29b req-d420f047-9b36-4db2-9776-21d4f49c6277 service nova] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Detach interface failed, port_id=e9053dbb-d79a-4cac-a034-fe85d0ed9832, reason: Instance eed22b8d-f8ea-4b90-8730-61d9a89ddfaa could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1019.049028] env[69994]: DEBUG nova.compute.manager [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Received event network-vif-plugged-8c33cc1e-6e3c-4b24-b456-71e80fc23840 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.050452] env[69994]: DEBUG oslo_concurrency.lockutils [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] Acquiring lock "47e80abc-2f7e-432c-bd2f-3064841401fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.050452] env[69994]: DEBUG oslo_concurrency.lockutils [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.050452] env[69994]: DEBUG oslo_concurrency.lockutils [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.050452] env[69994]: DEBUG nova.compute.manager [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] No waiting events found dispatching network-vif-plugged-8c33cc1e-6e3c-4b24-b456-71e80fc23840 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1019.050452] env[69994]: WARNING nova.compute.manager [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Received unexpected event 
network-vif-plugged-8c33cc1e-6e3c-4b24-b456-71e80fc23840 for instance with vm_state building and task_state spawning. [ 1019.050452] env[69994]: DEBUG nova.compute.manager [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Received event network-changed-8c33cc1e-6e3c-4b24-b456-71e80fc23840 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.050782] env[69994]: DEBUG nova.compute.manager [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Refreshing instance network info cache due to event network-changed-8c33cc1e-6e3c-4b24-b456-71e80fc23840. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1019.050782] env[69994]: DEBUG oslo_concurrency.lockutils [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] Acquiring lock "refresh_cache-47e80abc-2f7e-432c-bd2f-3064841401fc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.158953] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52289989-b461-bf9a-3377-7e75aa4d5cb0, 'name': SearchDatastore_Task, 'duration_secs': 0.027552} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.159500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.159500] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 95b7d534-ac5b-4982-830d-bf65ecd610b3/95b7d534-ac5b-4982-830d-bf65ecd610b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1019.159768] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8739861a-ce0a-4990-985c-aedd32e33ea0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.165749] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1019.165749] env[69994]: value = "task-3242340" [ 1019.165749] env[69994]: _type = "Task" [ 1019.165749] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.174353] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242340, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.190085] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 81bae584-e558-4f96-9696-2510fed5a2e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.201332] env[69994]: DEBUG oslo_vmware.api [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242337, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.240655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.251377] env[69994]: INFO nova.compute.manager [-] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Took 1.25 seconds to deallocate network for instance. [ 1019.258840] env[69994]: DEBUG nova.network.neutron [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1019.305710] env[69994]: DEBUG oslo_vmware.api [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Task: {'id': task-3242339, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377335} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.306036] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1019.306172] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1019.306351] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1019.306523] env[69994]: INFO nova.compute.manager [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1019.306761] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.307334] env[69994]: DEBUG nova.compute.manager [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1019.307444] env[69994]: DEBUG nova.network.neutron [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1019.460273] env[69994]: DEBUG nova.network.neutron [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Updating instance_info_cache with network_info: [{"id": "8c33cc1e-6e3c-4b24-b456-71e80fc23840", "address": "fa:16:3e:68:00:b2", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c33cc1e-6e", "ovs_interfaceid": "8c33cc1e-6e3c-4b24-b456-71e80fc23840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.532037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.532526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.532774] env[69994]: INFO nova.compute.manager [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Rebooting instance [ 1019.609385] env[69994]: INFO nova.compute.manager [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] 
Unrescuing [ 1019.609385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.609385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquired lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.609385] env[69994]: DEBUG nova.network.neutron [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.676057] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242340, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.700793] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 071151e4-a3ee-4a89-8b83-19bef3fb7d3e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1019.710815] env[69994]: DEBUG oslo_vmware.api [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.581134} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.710815] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1019.712087] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1019.712087] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1019.712087] env[69994]: INFO nova.compute.manager [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1019.712087] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.712087] env[69994]: DEBUG nova.compute.manager [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1019.712087] env[69994]: DEBUG nova.network.neutron [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1019.759330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.963413] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-47e80abc-2f7e-432c-bd2f-3064841401fc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.963835] env[69994]: DEBUG nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Instance network_info: |[{"id": "8c33cc1e-6e3c-4b24-b456-71e80fc23840", "address": "fa:16:3e:68:00:b2", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c33cc1e-6e", "ovs_interfaceid": "8c33cc1e-6e3c-4b24-b456-71e80fc23840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1019.964102] env[69994]: DEBUG oslo_concurrency.lockutils [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] Acquired lock "refresh_cache-47e80abc-2f7e-432c-bd2f-3064841401fc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.964289] env[69994]: DEBUG nova.network.neutron [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Refreshing network info cache for port 8c33cc1e-6e3c-4b24-b456-71e80fc23840 {{(pid=69994) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1019.965570] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:00:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c33cc1e-6e3c-4b24-b456-71e80fc23840', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1019.974807] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.978276] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1019.980049] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bee1b8a-c6c4-447c-90a2-25ff1a601f95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.001896] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.001896] env[69994]: value = "task-3242341" [ 1020.001896] env[69994]: _type = "Task" [ 1020.001896] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.009944] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242341, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.057561] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.057782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.059047] env[69994]: DEBUG nova.network.neutron [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1020.087110] env[69994]: DEBUG nova.network.neutron [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.176877] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242340, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773836} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.180916] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 95b7d534-ac5b-4982-830d-bf65ecd610b3/95b7d534-ac5b-4982-830d-bf65ecd610b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.181246] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.181546] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99c743ac-46a3-4485-9fa2-38d8b0e1521a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.188721] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1020.188721] env[69994]: value = "task-3242342" [ 1020.188721] env[69994]: _type = "Task" [ 1020.188721] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.198790] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242342, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.206059] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 384889a3-c3d9-4e0e-8d1c-95193cf4343d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1020.303545] env[69994]: DEBUG nova.network.neutron [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Updated VIF entry in instance network info cache for port 8c33cc1e-6e3c-4b24-b456-71e80fc23840. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1020.304079] env[69994]: DEBUG nova.network.neutron [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Updating instance_info_cache with network_info: [{"id": "8c33cc1e-6e3c-4b24-b456-71e80fc23840", "address": "fa:16:3e:68:00:b2", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c33cc1e-6e", "ovs_interfaceid": "8c33cc1e-6e3c-4b24-b456-71e80fc23840", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.339597] env[69994]: DEBUG nova.network.neutron [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.497493] env[69994]: DEBUG nova.network.neutron [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.512594] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242341, 'name': CreateVM_Task, 'duration_secs': 0.459865} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.513549] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1020.514299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.514512] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.514830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1020.515389] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434ef3b0-59b4-4283-b85e-375578e545a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.521688] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1020.521688] env[69994]: value = 
"session[520ca315-cd17-8670-37df-715bbcc23663]52290b39-667f-c1b4-a9ac-88f51b924086" [ 1020.521688] env[69994]: _type = "Task" [ 1020.521688] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.531159] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52290b39-667f-c1b4-a9ac-88f51b924086, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.590923] env[69994]: INFO nova.compute.manager [-] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Took 1.28 seconds to deallocate network for instance. [ 1020.701181] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110192} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.701442] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.702246] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb75132-f004-43e4-8987-14c30aaad111 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.708250] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 799bf051-86b4-45bd-b9bf-df767074dac8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1020.727148] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 95b7d534-ac5b-4982-830d-bf65ecd610b3/95b7d534-ac5b-4982-830d-bf65ecd610b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.728938] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ab99499b-21a2-465b-9975-4e0adb18df94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1020.729063] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1020.729113] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1020.733809] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c01723e-c9ff-453d-bd95-5e8198c6d406 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.755300] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1020.755300] env[69994]: value = "task-3242343" [ 1020.755300] env[69994]: _type = "Task" [ 1020.755300] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.765188] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242343, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.807131] env[69994]: DEBUG oslo_concurrency.lockutils [req-a86d5601-d470-4dbe-b8ec-5de730f1786e req-eca05810-f6e9-424c-be2e-1e8dd3165891 service nova] Releasing lock "refresh_cache-47e80abc-2f7e-432c-bd2f-3064841401fc" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.818075] env[69994]: DEBUG nova.network.neutron [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", 
"ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.842617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Releasing lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.843272] env[69994]: DEBUG nova.objects.instance [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lazy-loading 'flavor' on Instance uuid ef410b09-8686-409e-8391-d50cd0e0df04 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.000831] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315884b9-7cd8-48bb-bc97-5f04a7e49ce0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.003688] env[69994]: INFO nova.compute.manager [-] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Took 1.29 seconds to deallocate network for instance. [ 1021.013625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd76ad54-1b78-440f-b154-2ee036597634 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.051539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69a1a16-ceea-4614-b2cc-490ad7b2dc38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.064145] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52290b39-667f-c1b4-a9ac-88f51b924086, 'name': SearchDatastore_Task, 'duration_secs': 0.062527} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.064803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0516217a-e6ea-4e0e-9bbc-46c59f574357 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.070867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.072503] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.072503] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.072503] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.072503] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.072939] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89b06409-c6c7-4358-bea1-142d532a9ebb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.086608] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1021.090924] env[69994]: DEBUG nova.compute.manager [req-6647024c-d42a-4659-88ee-92f1b501dd70 req-d4132cff-b51b-48dd-a63d-44b7585bd921 service nova] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Received event 
network-vif-deleted-e4706905-12e3-43b1-a83a-409585a96042 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.091159] env[69994]: DEBUG nova.compute.manager [req-6647024c-d42a-4659-88ee-92f1b501dd70 req-d4132cff-b51b-48dd-a63d-44b7585bd921 service nova] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Received event network-vif-deleted-cd8d4128-2422-4fb9-989c-0ceb2eb2123a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.091479] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.091665] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.092810] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22cfef24-77d3-434a-a628-75df1810d730 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.098597] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1021.098597] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f8155a-8a94-9b0e-688e-b3b246b793c9" [ 1021.098597] env[69994]: _type = "Task" [ 1021.098597] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.104118] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.109196] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f8155a-8a94-9b0e-688e-b3b246b793c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.265542] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242343, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.321518] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.351291] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7191e5-382d-4945-9549-8ae5b6e2dbb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.376768] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.376919] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a1e98c8-482d-4f38-b9f1-89b6527a8ada {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.383287] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1021.383287] env[69994]: value = "task-3242344" [ 1021.383287] env[69994]: _type = "Task" [ 1021.383287] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.391364] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242344, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.518860] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.610388] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f8155a-8a94-9b0e-688e-b3b246b793c9, 'name': SearchDatastore_Task, 'duration_secs': 0.02548} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.611356] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c3c1669-1592-4c5f-ad27-bf70450e2bfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.615516] env[69994]: ERROR nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [req-f3038497-49d2-41ea-89e0-f4615a007ae4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f3038497-49d2-41ea-89e0-f4615a007ae4"}]} [ 1021.620042] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1021.620042] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525ba833-affe-4a0d-d257-1a76b187f5b7" [ 1021.620042] env[69994]: _type = "Task" [ 1021.620042] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.628732] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525ba833-affe-4a0d-d257-1a76b187f5b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.635402] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1021.651281] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1021.651481] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1021.665522] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1021.685281] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1021.769098] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242343, 'name': ReconfigVM_Task, 'duration_secs': 0.899047} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.769430] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 95b7d534-ac5b-4982-830d-bf65ecd610b3/95b7d534-ac5b-4982-830d-bf65ecd610b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.770140] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3438e94d-4af8-4aca-8667-e6d5c7586da9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.776696] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1021.776696] env[69994]: value = "task-3242345" [ 1021.776696] env[69994]: _type = "Task" [ 1021.776696] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.787607] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242345, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.828028] env[69994]: DEBUG nova.compute.manager [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1021.828788] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac20188-61fa-48a6-ac12-05bf911a43b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.895340] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242344, 'name': PowerOffVM_Task, 'duration_secs': 0.324938} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.895617] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.900953] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfiguring VM instance instance-00000040 to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1021.903623] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e59101df-b8a2-4ee3-9c1b-da1fd7c912ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.922301] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1021.922301] env[69994]: value = "task-3242346" [ 1021.922301] env[69994]: _type = "Task" [ 1021.922301] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.931914] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242346, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.965283] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e653bd26-03d0-459c-9be1-21f368029ec7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.972579] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088a5736-c35a-4090-929b-2abf6cede6a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.003097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8212eaa9-36cf-4763-9ee5-5fbbda942c33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.010413] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce8d7ff-cb53-46d5-9ef0-28b80137f954 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.023957] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.131729] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525ba833-affe-4a0d-d257-1a76b187f5b7, 'name': SearchDatastore_Task, 'duration_secs': 0.035351} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.131729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.131729] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 47e80abc-2f7e-432c-bd2f-3064841401fc/47e80abc-2f7e-432c-bd2f-3064841401fc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.132228] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b05cbd7a-cf4d-4459-ba21-ceeaa8d59cbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.138633] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1022.138633] env[69994]: value = "task-3242347" [ 1022.138633] env[69994]: _type = "Task" [ 1022.138633] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.149671] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242347, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.286721] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242345, 'name': Rename_Task, 'duration_secs': 0.202664} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.286952] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1022.287217] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40fcb9ef-41da-46e8-8986-ee14073e3388 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.293207] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1022.293207] env[69994]: value = "task-3242348" [ 1022.293207] env[69994]: _type = "Task" [ 1022.293207] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.301138] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.432330] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242346, 'name': ReconfigVM_Task, 'duration_secs': 0.349137} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.432593] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfigured VM instance instance-00000040 to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1022.432781] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1022.433053] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0868d862-515b-4d17-84b6-86c63514fc40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.439978] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1022.439978] env[69994]: value = "task-3242349" [ 1022.439978] env[69994]: _type = "Task" [ 1022.439978] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.448472] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242349, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.528563] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.648769] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242347, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.803200] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242348, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.846806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289fa42e-a1e3-412f-8c28-6100136a5de8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.854275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Doing hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1022.854536] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-695b589a-2c65-4919-a47d-eb9f1f78a9c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.860067] env[69994]: DEBUG oslo_vmware.api [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1022.860067] env[69994]: value = "task-3242350" [ 1022.860067] env[69994]: _type = "Task" [ 1022.860067] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.867830] env[69994]: DEBUG oslo_vmware.api [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242350, 'name': ResetVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.950887] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242349, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.033126] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1023.033346] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.385s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.033623] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.573s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.035168] env[69994]: INFO nova.compute.claims [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1023.149211] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242347, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.618773} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.149482] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 47e80abc-2f7e-432c-bd2f-3064841401fc/47e80abc-2f7e-432c-bd2f-3064841401fc.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.149693] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.149938] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c857ab21-787c-4992-aea4-c92be3592e35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.156587] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1023.156587] env[69994]: value = "task-3242351" [ 1023.156587] env[69994]: _type = "Task" [ 1023.156587] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.164967] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242351, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.303746] env[69994]: DEBUG oslo_vmware.api [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242348, 'name': PowerOnVM_Task, 'duration_secs': 0.978648} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.303998] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.304215] env[69994]: INFO nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Took 9.00 seconds to spawn the instance on the hypervisor. 
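[editor's note] The 409 "placement.concurrent_update" response and the inventory/aggregate/trait refresh recorded above follow the Placement API's generation-guard protocol: a PUT to /resource_providers/{uuid}/inventories must carry the provider generation the caller last saw, and a conflict means another writer bumped it first, so the client re-reads the provider and retries. Below is a minimal sketch of that retry loop, not Nova's actual scheduler report client; the endpoint, token, and retry count are illustrative assumptions, while the inventory payload copies the shape logged above.

```python
# Minimal sketch (not Nova's report client): retry an inventory PUT when
# Placement answers 409 placement.concurrent_update.
# PLACEMENT_URL and TOKEN are assumed/illustrative values.
import requests

PLACEMENT_URL = "http://placement.example/placement"   # assumption
TOKEN = "gAAAA..."                                      # assumption
HEADERS = {
    "X-Auth-Token": TOKEN,
    "OpenStack-API-Version": "placement 1.26",
}

INVENTORY = {  # same shape as the inventory dicts logged above
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 119,
                "step_size": 1, "allocation_ratio": 1.0},
}


def put_inventory(provider_uuid, inventories, retries=3):
    url = f"{PLACEMENT_URL}/resource_providers/{provider_uuid}/inventories"
    for _ in range(retries):
        # Re-read the provider's inventories to pick up the current generation.
        gen = requests.get(url, headers=HEADERS).json()[
            "resource_provider_generation"]
        resp = requests.put(
            url, headers=HEADERS,
            json={"resource_provider_generation": gen,
                  "inventories": inventories})
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation (as with req-f3038497 above); refresh and try again.
    raise RuntimeError("gave up after repeated generation conflicts")


# put_inventory("92ce3c95-4efe-4d04-802b-6b187afc5aa7", INVENTORY)
```

In the trace above the post-refresh data (DISK_GB max_unit 120) already matched what Placement held, so the report client logged "Inventory has not changed" rather than re-issuing the PUT.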
[ 1023.304408] env[69994]: DEBUG nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.305196] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a59876-3144-4a16-b602-d5493c136031 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.371919] env[69994]: DEBUG oslo_vmware.api [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242350, 'name': ResetVM_Task, 'duration_secs': 0.095712} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.372161] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Did hard reboot of VM {{(pid=69994) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1023.372396] env[69994]: DEBUG nova.compute.manager [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.373231] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b7143b-d590-4739-b811-27d1b66f1ee1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.451054] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242349, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.666980] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242351, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075021} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.667292] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.668156] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab35013-6060-46b4-9fc0-d165537be2d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.691250] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 47e80abc-2f7e-432c-bd2f-3064841401fc/47e80abc-2f7e-432c-bd2f-3064841401fc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.691561] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecb4395f-07fc-4ca7-897c-1bc164aa0487 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.711369] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1023.711369] env[69994]: value = "task-3242352" [ 1023.711369] env[69994]: _type = "Task" [ 1023.711369] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.724017] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242352, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.821058] env[69994]: INFO nova.compute.manager [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Took 28.85 seconds to build instance. [ 1023.886015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a138b189-5a21-471b-a3ce-4f0e3b2bbb6e tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.353s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.951914] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242349, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.219012] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "95b7d534-ac5b-4982-830d-bf65ecd610b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.222963] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242352, 'name': ReconfigVM_Task, 'duration_secs': 0.301874} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.223282] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 47e80abc-2f7e-432c-bd2f-3064841401fc/47e80abc-2f7e-432c-bd2f-3064841401fc.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.224128] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57da1df0-641e-4290-ac67-7521eab127f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.233950] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1024.233950] env[69994]: value = "task-3242353" [ 1024.233950] env[69994]: _type = "Task" [ 1024.233950] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.245707] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242353, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.276524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639fd6b3-6679-43ee-94e8-1b8109631307 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.283636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc62056-cd6c-4f43-b986-508b1c05abd1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.313195] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a713f1-2d83-4889-b579-e39e989d0b5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.320267] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44508568-e6bf-4614-be2e-6770fc78b0e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.324842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0d2c8def-2a41-48c6-8973-f235e3093a14 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.364s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.325119] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.106s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.325336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "95b7d534-ac5b-4982-830d-bf65ecd610b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.325542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.325708] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1024.328291] env[69994]: INFO nova.compute.manager [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Terminating instance [ 1024.337493] env[69994]: DEBUG nova.compute.provider_tree [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1024.451607] env[69994]: DEBUG oslo_vmware.api [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242349, 'name': PowerOnVM_Task, 'duration_secs': 1.538313} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.451884] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1024.452108] env[69994]: DEBUG nova.compute.manager [None req-13841513-0a60-4fd7-a7c2-7bf7884d5600 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.452863] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a821683a-4f62-40ee-80fd-3fe24924e479 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.744542] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242353, 'name': Rename_Task, 'duration_secs': 0.195431} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.744861] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.745147] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b504b78-2da6-41ac-9da0-e13a1ad95fec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.752460] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1024.752460] env[69994]: value = "task-3242354" [ 1024.752460] env[69994]: _type = "Task" [ 1024.752460] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.762512] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.843488] env[69994]: DEBUG nova.compute.manager [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1024.843855] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.844900] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a08c83-3d69-42ef-bcc0-2e49a43c5de4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.853283] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1024.853589] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d49658e3-024b-4b1f-856f-aab832959281 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.860151] env[69994]: DEBUG oslo_vmware.api [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1024.860151] env[69994]: value = "task-3242355" [ 1024.860151] env[69994]: _type = "Task" [ 1024.860151] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.870324] env[69994]: DEBUG oslo_vmware.api [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242355, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.881120] env[69994]: DEBUG nova.scheduler.client.report [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 116 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1024.881475] env[69994]: DEBUG nova.compute.provider_tree [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 116 to 117 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1024.881672] env[69994]: DEBUG nova.compute.provider_tree [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1025.265197] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242354, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.370740] env[69994]: DEBUG oslo_vmware.api [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242355, 'name': PowerOffVM_Task, 'duration_secs': 0.235046} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.371030] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1025.371233] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1025.371496] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41356574-cfb1-4f15-a388-435190430892 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.387254] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.387930] env[69994]: DEBUG nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1025.390619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.062s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.390804] env[69994]: DEBUG nova.objects.instance [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1025.434184] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1025.434390] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1025.434583] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Deleting the datastore file [datastore1] 95b7d534-ac5b-4982-830d-bf65ecd610b3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.434870] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e56e47ff-e44c-4ea7-bb20-0a3a88b8ffec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.442466] env[69994]: DEBUG oslo_vmware.api [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for the task: (returnval){ [ 1025.442466] env[69994]: value = "task-3242357" [ 1025.442466] env[69994]: _type = "Task" [ 1025.442466] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.451735] env[69994]: DEBUG oslo_vmware.api [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242357, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.659566] env[69994]: DEBUG nova.compute.manager [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Received event network-changed-37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1025.659813] env[69994]: DEBUG nova.compute.manager [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing instance network info cache due to event network-changed-37af0480-c14f-4941-b963-b25c22c833b3. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1025.660141] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] Acquiring lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.660398] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] Acquired lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.660508] env[69994]: DEBUG nova.network.neutron [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1025.673738] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521942bd-ee16-36a0-dafe-c7a607f0ed46/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1025.675704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71553ec4-5ac3-415f-9d00-27d21e84ea25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.688014] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521942bd-ee16-36a0-dafe-c7a607f0ed46/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1025.688972] env[69994]: ERROR oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521942bd-ee16-36a0-dafe-c7a607f0ed46/disk-0.vmdk due to incomplete transfer. 
[ 1025.688972] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6d3efe51-c872-479f-a714-f4271aab9008 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.697345] env[69994]: DEBUG oslo_vmware.rw_handles [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521942bd-ee16-36a0-dafe-c7a607f0ed46/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1025.697652] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Uploaded image b3182bd5-a318-4328-8617-30fb9c61ad32 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1025.700586] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1025.701351] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-071b863a-220b-4f25-abbb-a5b44d0afa4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.708115] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1025.708115] env[69994]: value = "task-3242358" [ 1025.708115] env[69994]: _type = "Task" [ 1025.708115] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.720413] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242358, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.763799] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242354, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.896535] env[69994]: DEBUG nova.compute.utils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.901084] env[69994]: DEBUG nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1025.901266] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1025.948294] env[69994]: DEBUG nova.policy [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b796608acf23444d909343ec20e84175', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '978da68b62d8409da5d8c8a45cd985c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1025.953247] env[69994]: DEBUG oslo_vmware.api [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Task: {'id': task-3242357, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289754} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.953583] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.953674] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1025.953835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1025.954016] env[69994]: INFO nova.compute.manager [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1025.954273] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1025.954518] env[69994]: DEBUG nova.compute.manager [-] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1025.954617] env[69994]: DEBUG nova.network.neutron [-] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1026.221768] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242358, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.267294] env[69994]: DEBUG oslo_vmware.api [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242354, 'name': PowerOnVM_Task, 'duration_secs': 1.179458} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.268508] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Successfully created port: a696f1f6-260e-4d3b-a302-98dc2de9b949 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.271362] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1026.271362] env[69994]: INFO nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Took 8.55 seconds to spawn the instance on the hypervisor. 
[ 1026.271362] env[69994]: DEBUG nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.275034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c0b8c0-d29c-478b-a702-5d9a4443f881 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.408094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-635eba0c-1db2-44f1-8fc4-d5878eaa466b tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.408094] env[69994]: DEBUG nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1026.410944] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.258s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.413570] env[69994]: INFO nova.compute.claims [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.557191] env[69994]: DEBUG nova.network.neutron [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updated VIF entry in instance network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1026.557581] env[69994]: DEBUG nova.network.neutron [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.660323] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Successfully created port: f3cb974c-43b4-4fda-966b-aee04d3459a9 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.720634] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242358, 'name': Destroy_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.796129] env[69994]: INFO nova.compute.manager [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Took 23.76 seconds to build instance. 
[ 1026.860254] env[69994]: DEBUG nova.network.neutron [-] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.000951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "290e8749-6860-4303-b966-65d2efee5499" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.001291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.001452] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "290e8749-6860-4303-b966-65d2efee5499-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.001637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.001804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.004022] env[69994]: INFO nova.compute.manager [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Terminating instance [ 1027.061046] env[69994]: DEBUG oslo_concurrency.lockutils [req-9b701569-8694-44ab-91b8-f2b27c30fe4b req-b001a33c-1d26-4f6b-a563-09ae94d08899 service nova] Releasing lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.219690] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242358, 'name': Destroy_Task, 'duration_secs': 1.020399} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.219985] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Destroyed the VM [ 1027.220288] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1027.220533] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a26ee11f-04f7-4ca5-94e7-a337482ce94d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.227327] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1027.227327] env[69994]: value = "task-3242359" [ 1027.227327] env[69994]: _type = "Task" [ 1027.227327] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.235736] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242359, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.298618] env[69994]: DEBUG oslo_concurrency.lockutils [None req-57136c45-e640-42d8-b2cf-e2df5d7fbd00 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.271s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.364255] env[69994]: INFO nova.compute.manager [-] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Took 1.41 seconds to deallocate network for instance. [ 1027.425357] env[69994]: DEBUG nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1027.448031] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.448324] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.448488] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.448709] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.448816] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.448964] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.449204] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.449369] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.449538] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae 
tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.449700] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.449873] env[69994]: DEBUG nova.virt.hardware [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.450769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ec7361-295f-41df-8ac9-86db927c8b34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.461298] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7ae82d-db91-4eb4-9bba-bcde2fcf2f57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.485782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "f00662a9-92e0-4520-9ced-3cfd6e83628b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.485973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.510267] env[69994]: DEBUG nova.compute.manager [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.510586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.512150] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e1a3b0-46c2-4f9e-8ba2-1ab3f147df43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.520927] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.524176] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5035c154-11e3-4553-afae-f09ee27fd7c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.555775] env[69994]: DEBUG oslo_concurrency.lockutils [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "47e80abc-2f7e-432c-bd2f-3064841401fc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.556036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.556225] env[69994]: DEBUG nova.compute.manager [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.557179] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8b959e-aea7-4219-88ca-ae62ba114a11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.565736] env[69994]: DEBUG nova.compute.manager [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1027.566286] env[69994]: DEBUG nova.objects.instance [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'flavor' on Instance uuid 47e80abc-2f7e-432c-bd2f-3064841401fc {{(pid=69994) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.688703] env[69994]: DEBUG nova.compute.manager [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Received event network-changed-37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.688836] env[69994]: DEBUG nova.compute.manager [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing instance network info cache due to event network-changed-37af0480-c14f-4941-b963-b25c22c833b3. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1027.689064] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] Acquiring lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.689253] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] Acquired lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.689435] env[69994]: DEBUG nova.network.neutron [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Refreshing network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.721242] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad783f6-7ece-41bd-8338-ba7b6bafcf0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.732035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c5e900-c63a-4ea8-b602-e2f9860f7839 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.739895] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242359, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.766714] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1efa39-fbc9-44cf-97e6-d0814a7c43d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.774658] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f812a97-e1e3-496a-9bb6-eb481e98771f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.788025] env[69994]: DEBUG nova.compute.provider_tree [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.870834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.990643] env[69994]: DEBUG nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1028.241948] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242359, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.291183] env[69994]: DEBUG nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.346783] env[69994]: DEBUG nova.compute.manager [req-4902c26a-469b-473a-b99b-c2f14959600d req-3d64e644-9f7e-42ac-8a26-1e4b5987c602 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received event network-vif-plugged-a696f1f6-260e-4d3b-a302-98dc2de9b949 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1028.347035] env[69994]: DEBUG oslo_concurrency.lockutils [req-4902c26a-469b-473a-b99b-c2f14959600d req-3d64e644-9f7e-42ac-8a26-1e4b5987c602 service nova] Acquiring lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.347688] env[69994]: DEBUG oslo_concurrency.lockutils [req-4902c26a-469b-473a-b99b-c2f14959600d req-3d64e644-9f7e-42ac-8a26-1e4b5987c602 service nova] Lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.348017] env[69994]: DEBUG oslo_concurrency.lockutils [req-4902c26a-469b-473a-b99b-c2f14959600d req-3d64e644-9f7e-42ac-8a26-1e4b5987c602 service nova] Lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.348291] env[69994]: DEBUG nova.compute.manager [req-4902c26a-469b-473a-b99b-c2f14959600d req-3d64e644-9f7e-42ac-8a26-1e4b5987c602 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] No waiting events found dispatching network-vif-plugged-a696f1f6-260e-4d3b-a302-98dc2de9b949 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1028.348523] env[69994]: WARNING nova.compute.manager [req-4902c26a-469b-473a-b99b-c2f14959600d req-3d64e644-9f7e-42ac-8a26-1e4b5987c602 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received unexpected event network-vif-plugged-a696f1f6-260e-4d3b-a302-98dc2de9b949 for instance with vm_state building and task_state spawning. 
[ 1028.396972] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.397230] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.397413] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleting the datastore file [datastore2] 290e8749-6860-4303-b966-65d2efee5499 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.397677] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6290efa-dab9-4c81-8f22-17f56f2aaf60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.405243] env[69994]: DEBUG oslo_vmware.api [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1028.405243] env[69994]: value = "task-3242361" [ 1028.405243] env[69994]: _type = "Task" [ 1028.405243] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.413330] env[69994]: DEBUG oslo_vmware.api [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242361, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.500357] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Successfully updated port: a696f1f6-260e-4d3b-a302-98dc2de9b949 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1028.513538] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.541559] env[69994]: DEBUG nova.network.neutron [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updated VIF entry in instance network info cache for port 37af0480-c14f-4941-b963-b25c22c833b3. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.541925] env[69994]: DEBUG nova.network.neutron [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [{"id": "37af0480-c14f-4941-b963-b25c22c833b3", "address": "fa:16:3e:57:78:19", "network": {"id": "36dc1c22-7ec0-41a6-a3ca-873fea58c351", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1496166079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30a9ea2f804f49ec8c5c6861b507454e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37af0480-c1", "ovs_interfaceid": "37af0480-c14f-4941-b963-b25c22c833b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.573318] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.573717] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1fb9f7c-032a-4ec5-81a2-eb52415f75e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.580011] env[69994]: DEBUG oslo_vmware.api [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1028.580011] env[69994]: value = "task-3242362" [ 1028.580011] env[69994]: _type = "Task" [ 1028.580011] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.589367] env[69994]: DEBUG oslo_vmware.api [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242362, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.741702] env[69994]: DEBUG oslo_vmware.api [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242359, 'name': RemoveSnapshot_Task, 'duration_secs': 1.068198} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.741991] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1028.742244] env[69994]: INFO nova.compute.manager [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Took 18.23 seconds to snapshot the instance on the hypervisor. [ 1028.799519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.800114] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1028.802712] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.624s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.804136] env[69994]: INFO nova.compute.claims [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.914856] env[69994]: DEBUG oslo_vmware.api [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242361, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151146} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.915138] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.915329] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.915505] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.915675] env[69994]: INFO nova.compute.manager [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Took 1.41 seconds to destroy the instance on the hypervisor. [ 1028.915911] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.916132] env[69994]: DEBUG nova.compute.manager [-] [instance: 290e8749-6860-4303-b966-65d2efee5499] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1028.916242] env[69994]: DEBUG nova.network.neutron [-] [instance: 290e8749-6860-4303-b966-65d2efee5499] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.044991] env[69994]: DEBUG oslo_concurrency.lockutils [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] Releasing lock "refresh_cache-ef410b09-8686-409e-8391-d50cd0e0df04" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.045305] env[69994]: DEBUG nova.compute.manager [req-fd84e7e9-6b55-42ea-977a-b0036c3c2640 req-080dfd58-52d8-42ce-8fe9-57ac0ede5e7a service nova] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Received event network-vif-deleted-144ed90e-dece-4cae-a85e-25e46c84dd0b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1029.089936] env[69994]: DEBUG oslo_vmware.api [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242362, 'name': PowerOffVM_Task, 'duration_secs': 0.243637} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.090272] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.090471] env[69994]: DEBUG nova.compute.manager [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.091254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f66d6b-3e05-4ed2-b926-50bbd678ecb8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.248100] env[69994]: DEBUG nova.compute.manager [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 290e8749-6860-4303-b966-65d2efee5499] Instance disappeared during snapshot {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1029.260700] env[69994]: DEBUG nova.compute.manager [None req-2cc4712b-4c02-46a7-a8df-07146d0cd1a9 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image not found during clean up b3182bd5-a318-4328-8617-30fb9c61ad32 {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1029.309505] env[69994]: DEBUG nova.compute.utils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1029.311647] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1029.312569] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1029.370360] env[69994]: DEBUG nova.policy [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5636aa429a354815be1db94c247c99a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea1445fce8f849508444c1793de63df1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1029.602648] env[69994]: DEBUG oslo_concurrency.lockutils [None req-da8e91c0-1076-451d-bccb-6b2c97adff52 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.696164] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Successfully created port: 8c465e61-001d-4355-b0db-ac3a4e1aeec6 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.746213] env[69994]: DEBUG nova.network.neutron [-] [instance: 290e8749-6860-4303-b966-65d2efee5499] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.816329] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1029.915990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.915990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.027660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "47e80abc-2f7e-432c-bd2f-3064841401fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.027660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.027660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "47e80abc-2f7e-432c-bd2f-3064841401fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.027660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.027660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.029803] env[69994]: INFO nova.compute.manager [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Terminating instance [ 1030.101648] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebba390-9505-4469-9005-c02d69b8e2cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.110156] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f2e416-183b-4d2d-8d47-10127e6bf9d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.140987] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff36ec1f-3e9b-4070-9e00-a35fe01ca2af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.149077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa2c3da-0b47-4efb-88e1-0248354f1f7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.163484] env[69994]: DEBUG nova.compute.provider_tree [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1030.248746] env[69994]: INFO nova.compute.manager [-] [instance: 290e8749-6860-4303-b966-65d2efee5499] Took 1.33 seconds to deallocate network for instance. [ 1030.419218] env[69994]: DEBUG nova.compute.utils [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1030.484736] env[69994]: DEBUG nova.compute.manager [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received event network-changed-a696f1f6-260e-4d3b-a302-98dc2de9b949 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.484976] env[69994]: DEBUG nova.compute.manager [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Refreshing instance network info cache due to event network-changed-a696f1f6-260e-4d3b-a302-98dc2de9b949. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1030.485262] env[69994]: DEBUG oslo_concurrency.lockutils [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] Acquiring lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.485440] env[69994]: DEBUG oslo_concurrency.lockutils [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] Acquired lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.485696] env[69994]: DEBUG nova.network.neutron [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Refreshing network info cache for port a696f1f6-260e-4d3b-a302-98dc2de9b949 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1030.536130] env[69994]: DEBUG nova.compute.manager [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1030.536368] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.537252] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c799c2b3-7c73-4585-a99e-2a7735b27bb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.546382] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1030.546696] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-906892ef-3fe6-4fb7-8b58-d6a603b41e9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.635516] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Successfully updated port: f3cb974c-43b4-4fda-966b-aee04d3459a9 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.650813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1030.651115] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 
tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1030.651418] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleting the datastore file [datastore1] 47e80abc-2f7e-432c-bd2f-3064841401fc {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.651684] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14024682-eb60-48ec-b2cf-6b5bc20dcb28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.657973] env[69994]: DEBUG oslo_vmware.api [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1030.657973] env[69994]: value = "task-3242364" [ 1030.657973] env[69994]: _type = "Task" [ 1030.657973] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.669224] env[69994]: DEBUG oslo_vmware.api [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.686242] env[69994]: ERROR nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [req-19cc4cf6-49b2-4f06-aa58-fdfb6558799c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-19cc4cf6-49b2-4f06-aa58-fdfb6558799c"}]} [ 1030.703389] env[69994]: DEBUG nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1030.716722] env[69994]: DEBUG nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1030.716965] env[69994]: DEBUG nova.compute.provider_tree [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1030.728988] env[69994]: DEBUG nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1030.747047] env[69994]: DEBUG nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1030.754554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1030.827361] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1030.855977] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1030.856279] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1030.856713] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1030.856942] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1030.857112] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1030.857266] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1030.857474] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1030.857635] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1030.857803] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1030.857968] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1030.858155] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1030.859019] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad15cdd-97dd-42e7-889e-a70191384e64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.867870] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426697ca-a4f1-49fe-81d6-600830892d25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.923093] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.020319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71bd401-54e5-41be-b10f-b13396731bfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.028693] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d689a8b-d149-4c25-8aac-1d8e80d4b17b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.061745] env[69994]: DEBUG nova.network.neutron [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.064035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a04d29e-a70c-4d63-982d-4d67870b5f08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.072023] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34df3d9-6b7b-4622-90ad-efc6373d84f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.090522] env[69994]: DEBUG nova.compute.provider_tree [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1031.138364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.168243] env[69994]: DEBUG oslo_vmware.api [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139624} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.168614] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.168871] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1031.169130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1031.169395] env[69994]: INFO nova.compute.manager [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Took 0.63 seconds to destroy the instance on the hypervisor. 
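[editor's note] The entries above show the driver polling DeleteDatastoreFile_Task and CreateVM_Task until they report completion (progress 0% ... completed successfully, with a duration_secs figure). The following is an illustrative sketch only, not the oslo.vmware implementation: a generic poll-until-done loop of the same shape. The callable fetch_task_state is hypothetical and stands in for whatever actually queries the task's state.

    import time

    def wait_for_task(fetch_task_state, interval=0.5, timeout=300.0):
        """Poll a task until it succeeds; raise on error or timeout.

        fetch_task_state is a hypothetical callable returning
        (state, progress, error), e.g. ("running", 0, None).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = fetch_task_state()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed: %s" % error)
            # Mirrors the log's periodic "progress is N%." messages.
            print("progress is %s%%." % progress)
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)

A caller would pass a closure that reads the task's current state from the backend; the loop itself only encodes the wait/report/raise pattern visible in the log.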
[ 1031.169721] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1031.170397] env[69994]: DEBUG nova.compute.manager [-] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1031.170460] env[69994]: DEBUG nova.network.neutron [-] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1031.181986] env[69994]: DEBUG nova.network.neutron [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.275925] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Successfully updated port: 8c465e61-001d-4355-b0db-ac3a4e1aeec6 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1031.427879] env[69994]: DEBUG nova.compute.manager [req-74ada316-9ed3-46c8-b77b-b4f82240d913 req-d52e45c2-3292-4f6a-b6a2-ba1017a0424f service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Received event network-vif-deleted-8c33cc1e-6e3c-4b24-b456-71e80fc23840 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.428062] env[69994]: INFO nova.compute.manager [req-74ada316-9ed3-46c8-b77b-b4f82240d913 req-d52e45c2-3292-4f6a-b6a2-ba1017a0424f service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Neutron deleted interface 8c33cc1e-6e3c-4b24-b456-71e80fc23840; detaching it from the instance and deleting it from the info cache [ 1031.428255] env[69994]: DEBUG nova.network.neutron [req-74ada316-9ed3-46c8-b77b-b4f82240d913 req-d52e45c2-3292-4f6a-b6a2-ba1017a0424f service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.625190] env[69994]: DEBUG nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 118 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1031.625507] env[69994]: DEBUG nova.compute.provider_tree [None 
req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 118 to 119 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1031.625722] env[69994]: DEBUG nova.compute.provider_tree [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1031.684493] env[69994]: DEBUG oslo_concurrency.lockutils [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] Releasing lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.684829] env[69994]: DEBUG nova.compute.manager [req-48dc0184-4616-4f36-ac0f-1a5c8933f193 req-2d316c42-0804-4a43-ae6a-46f54948e9e0 service nova] [instance: 290e8749-6860-4303-b966-65d2efee5499] Received event network-vif-deleted-cbe6409a-2178-4561-aae1-8f9071a0c976 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.685240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.685397] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.778245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "refresh_cache-071151e4-a3ee-4a89-8b83-19bef3fb7d3e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.778363] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "refresh_cache-071151e4-a3ee-4a89-8b83-19bef3fb7d3e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.778565] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 
071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.886981] env[69994]: DEBUG nova.network.neutron [-] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.931870] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-060e53c7-994c-4026-be86-af4bfcc2e047 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.941554] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8d8c07-96cd-4240-a0f7-4d7e895b30d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.973113] env[69994]: DEBUG nova.compute.manager [req-74ada316-9ed3-46c8-b77b-b4f82240d913 req-d52e45c2-3292-4f6a-b6a2-ba1017a0424f service nova] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Detach interface failed, port_id=8c33cc1e-6e3c-4b24-b456-71e80fc23840, reason: Instance 47e80abc-2f7e-432c-bd2f-3064841401fc could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1031.993080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.993283] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.993519] env[69994]: INFO nova.compute.manager [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Attaching volume a81b55d5-7771-408e-8f27-6b9ddfd1b4d9 to /dev/sdb [ 1032.022998] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae570ab-02cb-4eae-a2d1-2d84b826ed31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.030611] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486fd261-3605-4d10-ba59-0bd31b5fcc54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.043969] env[69994]: DEBUG nova.virt.block_device [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updating existing volume attachment record: 6d17e717-b5d3-47ad-88cc-81cb5e96d0da {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1032.131665] 
env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.328s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.131828] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1032.134636] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.923s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.136144] env[69994]: INFO nova.compute.claims [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.219492] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.321578] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.389781] env[69994]: INFO nova.compute.manager [-] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Took 1.22 seconds to deallocate network for instance. 
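[editor's note] Earlier entries show the scheduler report client receiving a 409 "placement.concurrent_update" (resource provider generation conflict), refreshing the provider's inventories, and retrying until the update lands and the generation advances from 118 to 119. The sketch below is illustrative only, assuming a Placement-like REST endpoint (PUT /resource_providers/{uuid}/inventories guarded by a resource_provider_generation field); it is not Nova's report client, just the refresh-and-retry pattern the log records.

    import requests

    def set_inventory(session: requests.Session, base_url: str, rp_uuid: str,
                      inventories: dict, max_retries: int = 3) -> None:
        """Update a provider's inventory, retrying on generation conflicts."""
        url = f"{base_url}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            # Re-read the current generation; it changes whenever another
            # writer has updated the same provider in the meantime.
            current = session.get(url)
            current.raise_for_status()
            generation = current.json()["resource_provider_generation"]

            resp = session.put(url, json={
                "resource_provider_generation": generation,
                "inventories": inventories,
            })
            if resp.status_code == 409:
                continue  # concurrent update: refresh generation and retry
            resp.raise_for_status()
            return
        raise RuntimeError("inventory update kept conflicting; giving up")

The generation field is the optimistic-concurrency token: a stale value yields the 409 seen in the log, and the retry simply re-reads it before attempting the PUT again.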
[ 1032.485271] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Updating instance_info_cache with network_info: [{"id": "8c465e61-001d-4355-b0db-ac3a4e1aeec6", "address": "fa:16:3e:08:58:d3", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c465e61-00", "ovs_interfaceid": "8c465e61-001d-4355-b0db-ac3a4e1aeec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.522278] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received event network-vif-plugged-f3cb974c-43b4-4fda-966b-aee04d3459a9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.522704] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Acquiring lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.522704] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.522875] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.523064] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] No waiting events found dispatching network-vif-plugged-f3cb974c-43b4-4fda-966b-aee04d3459a9 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.523203] 
env[69994]: WARNING nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received unexpected event network-vif-plugged-f3cb974c-43b4-4fda-966b-aee04d3459a9 for instance with vm_state building and task_state spawning. [ 1032.523373] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received event network-changed-f3cb974c-43b4-4fda-966b-aee04d3459a9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.523508] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Refreshing instance network info cache due to event network-changed-f3cb974c-43b4-4fda-966b-aee04d3459a9. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1032.523679] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Acquiring lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.640322] env[69994]: DEBUG nova.compute.utils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1032.647022] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1032.647022] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1032.656896] env[69994]: DEBUG nova.network.neutron [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Updating instance_info_cache with network_info: [{"id": "a696f1f6-260e-4d3b-a302-98dc2de9b949", "address": "fa:16:3e:78:5f:51", "network": {"id": "d91c0b1b-2c9c-459f-a50b-775d31850cf6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-992244972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.118", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa696f1f6-26", "ovs_interfaceid": "a696f1f6-260e-4d3b-a302-98dc2de9b949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "address": "fa:16:3e:51:b7:6e", "network": {"id": "e93dab81-5f07-43a3-9679-60bc0c54bc79", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-5660800", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3cb974c-43", "ovs_interfaceid": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.682514] env[69994]: DEBUG nova.policy [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5636aa429a354815be1db94c247c99a4', 'user_domain_id': 'default', 
'system_scope': None, 'domain_id': None, 'project_id': 'ea1445fce8f849508444c1793de63df1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1032.897151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.900219] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Successfully created port: 83645e16-3809-4855-9874-a71858e590d0 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.988671] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "refresh_cache-071151e4-a3ee-4a89-8b83-19bef3fb7d3e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.989017] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Instance network_info: |[{"id": "8c465e61-001d-4355-b0db-ac3a4e1aeec6", "address": "fa:16:3e:08:58:d3", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c465e61-00", "ovs_interfaceid": "8c465e61-001d-4355-b0db-ac3a4e1aeec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.992943] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:58:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c465e61-001d-4355-b0db-ac3a4e1aeec6', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.998692] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Creating folder: Project (ea1445fce8f849508444c1793de63df1). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1032.998795] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d52209b-6b25-4b61-b141-79fbb12d3c10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.009790] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Created folder: Project (ea1445fce8f849508444c1793de63df1) in parent group-v647729. [ 1033.009790] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Creating folder: Instances. Parent ref: group-v647984. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1033.009790] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-100369b3-92ee-4702-9ab2-549a5faea39b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.018961] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Created folder: Instances in parent group-v647984. [ 1033.019154] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.019337] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.019529] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb4085ba-24fa-420e-9a44-0df1d2c6d3fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.038314] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.038314] env[69994]: value = "task-3242368" [ 1033.038314] env[69994]: _type = "Task" [ 1033.038314] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.045973] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242368, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.144970] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1033.160851] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Releasing lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.160851] env[69994]: DEBUG nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Instance network_info: |[{"id": "a696f1f6-260e-4d3b-a302-98dc2de9b949", "address": "fa:16:3e:78:5f:51", "network": {"id": "d91c0b1b-2c9c-459f-a50b-775d31850cf6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-992244972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.118", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa696f1f6-26", "ovs_interfaceid": "a696f1f6-260e-4d3b-a302-98dc2de9b949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "address": "fa:16:3e:51:b7:6e", "network": {"id": "e93dab81-5f07-43a3-9679-60bc0c54bc79", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-5660800", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3cb974c-43", "ovs_interfaceid": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1033.161104] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Acquired lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.161327] env[69994]: DEBUG nova.network.neutron [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Refreshing network info cache for port f3cb974c-43b4-4fda-966b-aee04d3459a9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.162802] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:5f:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e85cbc56-fee0-41f7-bc70-64f31775ce92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a696f1f6-260e-4d3b-a302-98dc2de9b949', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:b7:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d2742ba-c3af-4412-877d-c2811dfeba46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3cb974c-43b4-4fda-966b-aee04d3459a9', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.173741] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.174982] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.177878] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa2ce471-d95e-4c97-b023-3240d3b562e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.202059] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.202059] env[69994]: value = "task-3242369" [ 1033.202059] env[69994]: _type = "Task" [ 1033.202059] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.212456] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242369, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.438319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf1cd81-dcf5-40e8-98df-4e8f8356e6c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.445510] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba4bea0-dfd5-417d-965f-06a76b4bf2b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.477907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64eb826-5823-43b2-8477-a3885a5d8100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.485567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2004331e-7e52-4c10-aced-13d26c1ecba1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.499691] env[69994]: DEBUG nova.compute.provider_tree [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.547926] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242368, 'name': CreateVM_Task, 'duration_secs': 0.361892} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.548189] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.548900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.549084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.549422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.549677] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82f22c0d-d2e5-4b78-8f74-1fef567c2162 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.554611] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1033.554611] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5252a8d3-de37-f71a-3deb-05aa026e26d0" [ 1033.554611] env[69994]: _type = "Task" [ 1033.554611] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.562643] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5252a8d3-de37-f71a-3deb-05aa026e26d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.713888] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242369, 'name': CreateVM_Task, 'duration_secs': 0.43172} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.713888] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1033.713888] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.713888] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.714052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1033.714247] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c21d09c0-6e0e-44de-8410-18343e647c1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.720809] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1033.720809] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52274d8c-8553-82d8-48ad-29174c474dc5" [ 1033.720809] env[69994]: _type = "Task" [ 1033.720809] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.730340] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52274d8c-8553-82d8-48ad-29174c474dc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.997284] env[69994]: DEBUG nova.network.neutron [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Updated VIF entry in instance network info cache for port f3cb974c-43b4-4fda-966b-aee04d3459a9. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.997728] env[69994]: DEBUG nova.network.neutron [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Updating instance_info_cache with network_info: [{"id": "a696f1f6-260e-4d3b-a302-98dc2de9b949", "address": "fa:16:3e:78:5f:51", "network": {"id": "d91c0b1b-2c9c-459f-a50b-775d31850cf6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-992244972", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.118", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa696f1f6-26", "ovs_interfaceid": "a696f1f6-260e-4d3b-a302-98dc2de9b949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "address": "fa:16:3e:51:b7:6e", "network": {"id": "e93dab81-5f07-43a3-9679-60bc0c54bc79", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-5660800", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3cb974c-43", "ovs_interfaceid": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1034.003784] env[69994]: DEBUG nova.scheduler.client.report [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.065951] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5252a8d3-de37-f71a-3deb-05aa026e26d0, 'name': SearchDatastore_Task, 'duration_secs': 0.012563} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.066295] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.066537] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.066774] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.066921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.067118] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.067384] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5989f3b-fca6-4e1e-8d6f-34c482c2cd42 {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.075831] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.076029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.076772] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a77c9a6c-134d-4255-9245-4ad97684e199 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.082063] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1034.082063] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523b6555-ea20-fc1e-b034-f317c6fe8c18" [ 1034.082063] env[69994]: _type = "Task" [ 1034.082063] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.089269] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523b6555-ea20-fc1e-b034-f317c6fe8c18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.157392] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1034.178370] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1034.178499] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.179052] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1034.179052] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.179052] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1034.179212] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1034.179322] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1034.179481] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1034.179646] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1034.179807] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1034.179976] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1034.180892] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01222f63-d811-4db3-a658-8969a4c884ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.188981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1748aa-740c-432e-b063-b4ff9c3d7421 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.230021] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52274d8c-8553-82d8-48ad-29174c474dc5, 'name': SearchDatastore_Task, 'duration_secs': 0.011014} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.230247] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.230484] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.230713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.230892] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.231214] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.231445] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d2ded2d-bbb2-4b2d-8ea1-602b38385d7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.239157] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.239339] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.240110] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-784a0ca8-e7b5-4193-bb1d-2cd9b33c63ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.245076] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1034.245076] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529d4ac0-d9b0-8a0e-3264-0308ad3ada9f" [ 1034.245076] env[69994]: _type = "Task" [ 1034.245076] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.252543] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529d4ac0-d9b0-8a0e-3264-0308ad3ada9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.341435] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Successfully updated port: 83645e16-3809-4855-9874-a71858e590d0 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.500840] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Releasing lock "refresh_cache-81bae584-e558-4f96-9696-2510fed5a2e0" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.501180] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Received event network-vif-plugged-8c465e61-001d-4355-b0db-ac3a4e1aeec6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.501332] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Acquiring lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.501546] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.501711] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.501878] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] No waiting events found dispatching network-vif-plugged-8c465e61-001d-4355-b0db-ac3a4e1aeec6 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.502395] env[69994]: WARNING nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Received unexpected event network-vif-plugged-8c465e61-001d-4355-b0db-ac3a4e1aeec6 for instance with vm_state building and task_state spawning. [ 1034.502395] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Received event network-changed-8c465e61-001d-4355-b0db-ac3a4e1aeec6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.502395] env[69994]: DEBUG nova.compute.manager [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Refreshing instance network info cache due to event network-changed-8c465e61-001d-4355-b0db-ac3a4e1aeec6. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1034.502558] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Acquiring lock "refresh_cache-071151e4-a3ee-4a89-8b83-19bef3fb7d3e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.502664] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Acquired lock "refresh_cache-071151e4-a3ee-4a89-8b83-19bef3fb7d3e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.502950] env[69994]: DEBUG nova.network.neutron [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Refreshing network info cache for port 8c465e61-001d-4355-b0db-ac3a4e1aeec6 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.508152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.508734] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1034.512111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.272s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.513801] env[69994]: INFO nova.compute.claims [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.549900] env[69994]: DEBUG nova.compute.manager [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Received event network-vif-plugged-83645e16-3809-4855-9874-a71858e590d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.550219] env[69994]: DEBUG oslo_concurrency.lockutils [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] Acquiring lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.550514] env[69994]: DEBUG oslo_concurrency.lockutils [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.550804] env[69994]: DEBUG oslo_concurrency.lockutils [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.551098] env[69994]: DEBUG nova.compute.manager [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] No waiting events found dispatching network-vif-plugged-83645e16-3809-4855-9874-a71858e590d0 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.551296] env[69994]: WARNING nova.compute.manager [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Received unexpected event network-vif-plugged-83645e16-3809-4855-9874-a71858e590d0 for instance with vm_state building and task_state spawning. 
[ 1034.551459] env[69994]: DEBUG nova.compute.manager [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Received event network-changed-83645e16-3809-4855-9874-a71858e590d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.551613] env[69994]: DEBUG nova.compute.manager [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Refreshing instance network info cache due to event network-changed-83645e16-3809-4855-9874-a71858e590d0. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1034.551796] env[69994]: DEBUG oslo_concurrency.lockutils [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] Acquiring lock "refresh_cache-384889a3-c3d9-4e0e-8d1c-95193cf4343d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.551931] env[69994]: DEBUG oslo_concurrency.lockutils [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] Acquired lock "refresh_cache-384889a3-c3d9-4e0e-8d1c-95193cf4343d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.552100] env[69994]: DEBUG nova.network.neutron [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Refreshing network info cache for port 83645e16-3809-4855-9874-a71858e590d0 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.590829] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523b6555-ea20-fc1e-b034-f317c6fe8c18, 'name': SearchDatastore_Task, 'duration_secs': 0.008621} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.591846] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb2c7ab6-a2aa-46a1-bb18-5e4ca9417b80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.596666] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1034.596666] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5257c7c3-c222-2e67-4e1b-cc36ffdb5f4c" [ 1034.596666] env[69994]: _type = "Task" [ 1034.596666] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.605640] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5257c7c3-c222-2e67-4e1b-cc36ffdb5f4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.659034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "fc31da72-d09e-415e-9866-3e7fc91fec79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.659150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.756992] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529d4ac0-d9b0-8a0e-3264-0308ad3ada9f, 'name': SearchDatastore_Task, 'duration_secs': 0.008566} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.757803] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f076d8c-436e-443f-af03-6a37d3e5522e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.763311] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1034.763311] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c96aa2-ee55-0d95-8edb-99bc11ebc995" [ 1034.763311] env[69994]: _type = "Task" [ 1034.763311] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.770331] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c96aa2-ee55-0d95-8edb-99bc11ebc995, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.844314] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "refresh_cache-384889a3-c3d9-4e0e-8d1c-95193cf4343d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.018767] env[69994]: DEBUG nova.compute.utils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1035.022365] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1035.022540] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.066480] env[69994]: DEBUG nova.policy [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5636aa429a354815be1db94c247c99a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea1445fce8f849508444c1793de63df1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1035.090518] env[69994]: DEBUG nova.network.neutron [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.108964] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5257c7c3-c222-2e67-4e1b-cc36ffdb5f4c, 'name': SearchDatastore_Task, 'duration_secs': 0.010282} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.109191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.109472] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 071151e4-a3ee-4a89-8b83-19bef3fb7d3e/071151e4-a3ee-4a89-8b83-19bef3fb7d3e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.109729] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef1e0307-f045-4bec-8ed6-0f3ca55204d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.116091] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1035.116091] env[69994]: value = "task-3242371" [ 1035.116091] env[69994]: _type = "Task" [ 1035.116091] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.126079] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242371, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.161491] env[69994]: DEBUG nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1035.222123] env[69994]: DEBUG nova.network.neutron [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.261755] env[69994]: DEBUG nova.network.neutron [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Updated VIF entry in instance network info cache for port 8c465e61-001d-4355-b0db-ac3a4e1aeec6. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.261755] env[69994]: DEBUG nova.network.neutron [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Updating instance_info_cache with network_info: [{"id": "8c465e61-001d-4355-b0db-ac3a4e1aeec6", "address": "fa:16:3e:08:58:d3", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c465e61-00", "ovs_interfaceid": "8c465e61-001d-4355-b0db-ac3a4e1aeec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.274198] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c96aa2-ee55-0d95-8edb-99bc11ebc995, 'name': SearchDatastore_Task, 'duration_secs': 0.009443} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.277030] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.277030] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 81bae584-e558-4f96-9696-2510fed5a2e0/81bae584-e558-4f96-9696-2510fed5a2e0.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.277030] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01ece353-9bc1-43ec-a270-90771035b2a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.283693] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1035.283693] env[69994]: value = "task-3242372" [ 1035.283693] env[69994]: _type = "Task" [ 1035.283693] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.294164] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.407982] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Successfully created port: 79ff1faf-be56-422b-9591-03f17c055f66 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.523414] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1035.630357] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242371, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461981} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.630357] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 071151e4-a3ee-4a89-8b83-19bef3fb7d3e/071151e4-a3ee-4a89-8b83-19bef3fb7d3e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1035.630525] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1035.630728] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-355ab4aa-1679-4101-a789-216cf7e2743d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.637978] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1035.637978] env[69994]: value = "task-3242373" [ 1035.637978] env[69994]: _type = "Task" [ 1035.637978] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.646977] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242373, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.696021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.724499] env[69994]: DEBUG oslo_concurrency.lockutils [req-fe16410b-c4f3-4dc2-a367-72244aee7a23 req-46ff31e1-2e0c-4749-b8fb-bc887dff16e5 service nova] Releasing lock "refresh_cache-384889a3-c3d9-4e0e-8d1c-95193cf4343d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.725007] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "refresh_cache-384889a3-c3d9-4e0e-8d1c-95193cf4343d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.725208] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.764572] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a384154-b1b2-49a1-a72f-6e27108793ab req-7e3a11f9-f0d8-4815-9e43-5c8dcde79f01 service nova] Releasing lock "refresh_cache-071151e4-a3ee-4a89-8b83-19bef3fb7d3e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.799148] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242372, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.833810] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fac6a85-ed7e-4b98-8582-4f3a5b42fead {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.842289] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454ebaa7-7817-454d-bc90-ad4cbe8b8061 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.876707] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b93f5f8-1b32-4ce8-bdfa-6099b4c6506f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.884586] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5332c4c8-c0b3-49a2-895f-6e1b99109fd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.900183] env[69994]: DEBUG nova.compute.provider_tree [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.146988] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076636} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.147244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.147962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e5afd9-684f-464e-9b02-15fc0aff0ea9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.171442] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 071151e4-a3ee-4a89-8b83-19bef3fb7d3e/071151e4-a3ee-4a89-8b83-19bef3fb7d3e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.171442] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-badb6c50-e51a-4b0b-b3ba-99f1afbf8aa9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.191082] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1036.191082] env[69994]: value = "task-3242374" [ 1036.191082] env[69994]: _type = "Task" [ 1036.191082] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.198823] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242374, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.260939] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1036.294981] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655849} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.295290] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 81bae584-e558-4f96-9696-2510fed5a2e0/81bae584-e558-4f96-9696-2510fed5a2e0.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1036.295522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.297816] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-740e9c40-3aec-4c5d-8551-ce4d4a4f4f80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.305640] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1036.305640] env[69994]: value = "task-3242375" [ 1036.305640] env[69994]: _type = "Task" [ 1036.305640] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.313217] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242375, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.387136] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Updating instance_info_cache with network_info: [{"id": "83645e16-3809-4855-9874-a71858e590d0", "address": "fa:16:3e:f3:b3:3e", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83645e16-38", "ovs_interfaceid": "83645e16-3809-4855-9874-a71858e590d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.403615] env[69994]: DEBUG nova.scheduler.client.report [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.537244] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1036.562285] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1036.562534] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.562691] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1036.562870] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.563030] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1036.563190] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1036.563397] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1036.563556] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1036.563721] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1036.563880] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1036.564069] env[69994]: DEBUG nova.virt.hardware [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1036.564916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d66239f-7c01-4ee4-8c7d-ae804f0c0cb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.572883] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768b8ea7-0718-4d04-9a46-16ee11778155 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.587168] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1036.587393] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647983', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'name': 'volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'serial': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1036.588156] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664cd960-22d6-4ea6-a618-0e81272b1a82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.602761] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a301c5e-2c8b-4b26-8221-c5bd7b02108b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.626071] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9/volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.626304] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5139b092-1d04-46b6-8f31-05216252998e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.643102] env[69994]: DEBUG oslo_vmware.api [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1036.643102] env[69994]: value = "task-3242376" [ 1036.643102] env[69994]: _type = "Task" [ 1036.643102] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.652994] env[69994]: DEBUG oslo_vmware.api [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.701075] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242374, 'name': ReconfigVM_Task, 'duration_secs': 0.326972} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.701394] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 071151e4-a3ee-4a89-8b83-19bef3fb7d3e/071151e4-a3ee-4a89-8b83-19bef3fb7d3e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.702039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-152100d8-7ce2-4b95-9356-7f59104ff59b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.709475] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1036.709475] env[69994]: value = "task-3242377" [ 1036.709475] env[69994]: _type = "Task" [ 1036.709475] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.719027] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242377, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.816354] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06555} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.816655] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.817463] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd9cca9-e4c4-4ca8-9cfc-0b2722067602 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.846631] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 81bae584-e558-4f96-9696-2510fed5a2e0/81bae584-e558-4f96-9696-2510fed5a2e0.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.846971] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4348ed7d-62b8-447a-af20-2e27d6697e43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.864324] env[69994]: DEBUG nova.compute.manager [req-1f465d60-f3bd-4fce-aec1-9763e39ea101 req-76daf906-0139-4cf8-9491-686bac7381c5 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Received event network-vif-plugged-79ff1faf-be56-422b-9591-03f17c055f66 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1036.864551] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f465d60-f3bd-4fce-aec1-9763e39ea101 req-76daf906-0139-4cf8-9491-686bac7381c5 service nova] Acquiring lock "799bf051-86b4-45bd-b9bf-df767074dac8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.864770] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f465d60-f3bd-4fce-aec1-9763e39ea101 req-76daf906-0139-4cf8-9491-686bac7381c5 service nova] Lock "799bf051-86b4-45bd-b9bf-df767074dac8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.864940] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f465d60-f3bd-4fce-aec1-9763e39ea101 req-76daf906-0139-4cf8-9491-686bac7381c5 service nova] Lock "799bf051-86b4-45bd-b9bf-df767074dac8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.865137] env[69994]: DEBUG nova.compute.manager [req-1f465d60-f3bd-4fce-aec1-9763e39ea101 req-76daf906-0139-4cf8-9491-686bac7381c5 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] No waiting events found dispatching network-vif-plugged-79ff1faf-be56-422b-9591-03f17c055f66 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1036.865303] env[69994]: WARNING nova.compute.manager 
[req-1f465d60-f3bd-4fce-aec1-9763e39ea101 req-76daf906-0139-4cf8-9491-686bac7381c5 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Received unexpected event network-vif-plugged-79ff1faf-be56-422b-9591-03f17c055f66 for instance with vm_state building and task_state spawning. [ 1036.871422] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1036.871422] env[69994]: value = "task-3242378" [ 1036.871422] env[69994]: _type = "Task" [ 1036.871422] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.880832] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242378, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.893028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "refresh_cache-384889a3-c3d9-4e0e-8d1c-95193cf4343d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.893028] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Instance network_info: |[{"id": "83645e16-3809-4855-9874-a71858e590d0", "address": "fa:16:3e:f3:b3:3e", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83645e16-38", "ovs_interfaceid": "83645e16-3809-4855-9874-a71858e590d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1036.893028] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:b3:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '83645e16-3809-4855-9874-a71858e590d0', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.899121] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.899549] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.900349] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-464b0f87-8737-461a-ba4f-df2c6b6527a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.925122] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.925681] env[69994]: DEBUG nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1036.928513] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.169s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.928513] env[69994]: DEBUG nova.objects.instance [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lazy-loading 'resources' on Instance uuid eed22b8d-f8ea-4b90-8730-61d9a89ddfaa {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.936744] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.936744] env[69994]: value = "task-3242379" [ 1036.936744] env[69994]: _type = "Task" [ 1036.936744] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.949621] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242379, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.043761] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Successfully updated port: 79ff1faf-be56-422b-9591-03f17c055f66 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.153187] env[69994]: DEBUG oslo_vmware.api [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242376, 'name': ReconfigVM_Task, 'duration_secs': 0.478594} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.153364] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9/volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.159057] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb173ce4-0ed0-470a-9683-386caee2d19b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.174790] env[69994]: DEBUG oslo_vmware.api [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1037.174790] env[69994]: value = "task-3242380" [ 1037.174790] env[69994]: _type = "Task" [ 1037.174790] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.189536] env[69994]: DEBUG oslo_vmware.api [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242380, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.218902] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242377, 'name': Rename_Task, 'duration_secs': 0.161973} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.219181] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.219842] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa90dc66-6db8-48d0-b080-d718ed86b094 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.226345] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1037.226345] env[69994]: value = "task-3242381" [ 1037.226345] env[69994]: _type = "Task" [ 1037.226345] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.234375] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.386179] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242378, 'name': ReconfigVM_Task, 'duration_secs': 0.320241} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.386179] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 81bae584-e558-4f96-9696-2510fed5a2e0/81bae584-e558-4f96-9696-2510fed5a2e0.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.386626] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94eb7a38-f246-42b0-a983-5aaad09c69a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.396808] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1037.396808] env[69994]: value = "task-3242382" [ 1037.396808] env[69994]: _type = "Task" [ 1037.396808] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.409680] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242382, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.432445] env[69994]: DEBUG nova.compute.utils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1037.436848] env[69994]: DEBUG nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1037.437089] env[69994]: DEBUG nova.network.neutron [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.449658] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242379, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.482957] env[69994]: DEBUG nova.policy [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b512f0a1ffba457b977e472009f59eed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '377f65074c2442588aee091b5165e1cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.549218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "refresh_cache-799bf051-86b4-45bd-b9bf-df767074dac8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.549504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "refresh_cache-799bf051-86b4-45bd-b9bf-df767074dac8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.549578] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1037.687679] env[69994]: DEBUG oslo_vmware.api [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242380, 'name': ReconfigVM_Task, 'duration_secs': 0.291762} 
completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.687679] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647983', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'name': 'volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'serial': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1037.742654] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242381, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.746571] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060fbd00-b483-4bbc-a17c-1bca64ce7f5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.759815] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0792a9-6115-4704-b7cb-8eff901f9feb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.793017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0196e70-13b7-49a1-99f7-51b7231bdfa6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.801366] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf401209-86a5-4256-9915-663013b1a44d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.816572] env[69994]: DEBUG nova.compute.provider_tree [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.825253] env[69994]: DEBUG nova.network.neutron [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Successfully created port: 92378003-993a-43f2-8823-55a4b83acdef {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.908039] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242382, 'name': Rename_Task, 'duration_secs': 0.174206} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.908039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.908256] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12f66048-8d09-4db0-98c7-9fa8f2ea8872 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.914958] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1037.914958] env[69994]: value = "task-3242383" [ 1037.914958] env[69994]: _type = "Task" [ 1037.914958] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.922563] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.937437] env[69994]: DEBUG nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1037.952672] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242379, 'name': CreateVM_Task, 'duration_secs': 0.56745} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.953065] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.953753] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.953973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.954426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1037.954746] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a72566ee-222d-4a27-888b-b9d5fa2319f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.961162] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1037.961162] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527e0fec-b967-5aa6-5466-e6a2d12ed9a4" [ 1037.961162] env[69994]: _type = "Task" [ 1037.961162] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.972460] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527e0fec-b967-5aa6-5466-e6a2d12ed9a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.091218] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.232460] env[69994]: DEBUG nova.network.neutron [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Updating instance_info_cache with network_info: [{"id": "79ff1faf-be56-422b-9591-03f17c055f66", "address": "fa:16:3e:66:f7:b2", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79ff1faf-be", "ovs_interfaceid": "79ff1faf-be56-422b-9591-03f17c055f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.240089] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242381, 'name': PowerOnVM_Task, 'duration_secs': 0.547284} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.240982] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.241254] env[69994]: INFO nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Took 7.41 seconds to spawn the instance on the hypervisor. 
[ 1038.241478] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.242337] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8366add-8098-490b-9ae7-89bb02306832 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.320077] env[69994]: DEBUG nova.scheduler.client.report [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.428592] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242383, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.473102] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527e0fec-b967-5aa6-5466-e6a2d12ed9a4, 'name': SearchDatastore_Task, 'duration_secs': 0.011856} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.473291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.473522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.473773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.473921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.474182] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.474389] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d102ff34-9989-4d00-88d4-d1107d4dac82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.485232] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.485513] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1038.486392] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07e9c567-7ddb-4859-b7e1-bb29ba336970 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.493481] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1038.493481] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5224ffc7-6aca-994c-3f9e-2142538c194e" [ 1038.493481] env[69994]: _type = "Task" [ 1038.493481] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.502587] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5224ffc7-6aca-994c-3f9e-2142538c194e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.729065] env[69994]: DEBUG nova.objects.instance [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 29071eb9-6334-4c23-acb4-142c12aa448d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1038.735542] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "refresh_cache-799bf051-86b4-45bd-b9bf-df767074dac8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.735542] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Instance network_info: |[{"id": "79ff1faf-be56-422b-9591-03f17c055f66", "address": "fa:16:3e:66:f7:b2", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79ff1faf-be", "ovs_interfaceid": "79ff1faf-be56-422b-9591-03f17c055f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1038.735856] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:f7:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79ff1faf-be56-422b-9591-03f17c055f66', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.743731] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.744643] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.744824] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1956ec71-ffb4-4328-8659-af7e50775e2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.766391] env[69994]: INFO nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Took 21.64 seconds to build instance. [ 1038.770608] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.770608] env[69994]: value = "task-3242384" [ 1038.770608] env[69994]: _type = "Task" [ 1038.770608] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.780671] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242384, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.825472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.827835] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.724s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.828085] env[69994]: DEBUG nova.objects.instance [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lazy-loading 'resources' on Instance uuid 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1038.851633] env[69994]: INFO nova.scheduler.client.report [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Deleted allocations for instance eed22b8d-f8ea-4b90-8730-61d9a89ddfaa [ 1038.886951] env[69994]: DEBUG nova.compute.manager [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Received event network-changed-79ff1faf-be56-422b-9591-03f17c055f66 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1038.887177] env[69994]: DEBUG nova.compute.manager [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Refreshing instance network info cache due to event network-changed-79ff1faf-be56-422b-9591-03f17c055f66. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1038.887439] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] Acquiring lock "refresh_cache-799bf051-86b4-45bd-b9bf-df767074dac8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.887681] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] Acquired lock "refresh_cache-799bf051-86b4-45bd-b9bf-df767074dac8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.887753] env[69994]: DEBUG nova.network.neutron [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Refreshing network info cache for port 79ff1faf-be56-422b-9591-03f17c055f66 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1038.927571] env[69994]: DEBUG oslo_vmware.api [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242383, 'name': PowerOnVM_Task, 'duration_secs': 0.553639} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.927988] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.928286] env[69994]: INFO nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Took 11.50 seconds to spawn the instance on the hypervisor. [ 1038.928514] env[69994]: DEBUG nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.929344] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e55486-646c-4019-bbe2-d75f77baaeee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.951553] env[69994]: DEBUG nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1038.979436] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.979739] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.979875] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.980097] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.980279] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.980454] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1038.980668] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1038.980851] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.981039] 
env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.981228] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.981427] env[69994]: DEBUG nova.virt.hardware [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.982708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38e77ef-92bc-4ca2-9487-59cd5d51f167 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.991313] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd996ff-8f7a-45bc-ad9d-e944f47f5a67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.012426] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5224ffc7-6aca-994c-3f9e-2142538c194e, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.013224] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a96b5793-01ac-40b2-a833-e137a457361c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.019026] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1039.019026] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52725cd0-3f31-c7ee-6d1c-eccebc047c74" [ 1039.019026] env[69994]: _type = "Task" [ 1039.019026] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.028366] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52725cd0-3f31-c7ee-6d1c-eccebc047c74, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.234488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5b31e360-30dd-4434-82d9-6a699a1d8675 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.241s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.269126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.149s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.282527] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242384, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.361626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-75f39166-1ffc-4040-a8c0-5fa40d188a1e tempest-InstanceActionsNegativeTestJSON-1977333424 tempest-InstanceActionsNegativeTestJSON-1977333424-project-member] Lock "eed22b8d-f8ea-4b90-8730-61d9a89ddfaa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.986s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.449419] env[69994]: INFO nova.compute.manager [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Took 25.01 seconds to build instance. [ 1039.532508] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52725cd0-3f31-c7ee-6d1c-eccebc047c74, 'name': SearchDatastore_Task, 'duration_secs': 0.010913} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.536349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.536670] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 384889a3-c3d9-4e0e-8d1c-95193cf4343d/384889a3-c3d9-4e0e-8d1c-95193cf4343d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1039.545657] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-759a6cb2-0558-456e-bd7a-f5e5cde9e783 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.562808] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1039.562808] env[69994]: value = "task-3242385" [ 1039.562808] env[69994]: _type = "Task" [ 1039.562808] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.568890] env[69994]: DEBUG nova.network.neutron [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Successfully updated port: 92378003-993a-43f2-8823-55a4b83acdef {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.578895] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242385, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.705490] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af9b761-d00c-4db6-ad0c-a8c7d82b1e0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.714934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4ba3ab-a2af-4e1f-8aa6-6522de5cd7f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.753351] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b732e9b-b13c-4460-beb3-75eeea146095 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.763600] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb22027-bbcf-4347-9823-5471572a0314 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.768614] env[69994]: DEBUG nova.network.neutron [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Updated VIF entry in instance network info cache for port 79ff1faf-be56-422b-9591-03f17c055f66. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1039.768953] env[69994]: DEBUG nova.network.neutron [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Updating instance_info_cache with network_info: [{"id": "79ff1faf-be56-422b-9591-03f17c055f66", "address": "fa:16:3e:66:f7:b2", "network": {"id": "927a1a97-aee6-47bc-b938-a8aeb845acdc", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-363045651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea1445fce8f849508444c1793de63df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79ff1faf-be", "ovs_interfaceid": "79ff1faf-be56-422b-9591-03f17c055f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.785687] env[69994]: DEBUG nova.compute.provider_tree [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.794422] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-3242384, 'name': CreateVM_Task, 'duration_secs': 0.51314} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.795077] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.795765] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.795927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.796266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1039.796935] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db2841fc-f486-419c-b558-fc2cfe2a6238 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.801832] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1039.801832] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ae2687-3143-e323-485a-55fe326257a0" [ 1039.801832] env[69994]: _type = "Task" [ 1039.801832] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.810902] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ae2687-3143-e323-485a-55fe326257a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.951169] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5e36ff6a-43d3-448a-b38f-91eb125875ae tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "81bae584-e558-4f96-9696-2510fed5a2e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.519s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.049957] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "81bae584-e558-4f96-9696-2510fed5a2e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.050312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "81bae584-e558-4f96-9696-2510fed5a2e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.050543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.050775] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.050970] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "81bae584-e558-4f96-9696-2510fed5a2e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.054106] env[69994]: INFO nova.compute.manager [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Terminating instance [ 1040.072895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.073264] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.073264] env[69994]: DEBUG nova.network.neutron [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.082628] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242385, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.193873] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.194266] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.271841] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4777e7b-b24b-46b0-a4e3-d2f53cf7e290 req-5e4cbd2b-9d08-49bb-b57d-0ca765cd4a76 service nova] Releasing lock "refresh_cache-799bf051-86b4-45bd-b9bf-df767074dac8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.289030] env[69994]: DEBUG nova.scheduler.client.report [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.315332] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ae2687-3143-e323-485a-55fe326257a0, 'name': SearchDatastore_Task, 'duration_secs': 0.012491} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.315655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.315915] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.316184] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.316364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.316592] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.316856] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd33c79d-7f66-4f7c-8587-0b6d634f5194 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.328038] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.328038] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.328674] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7301c308-69ef-4140-a705-c32e12ecf8db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.335566] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1040.335566] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526869af-cfd8-a31c-71ef-45af2c684009" [ 1040.335566] env[69994]: _type = "Task" [ 1040.335566] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.345523] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526869af-cfd8-a31c-71ef-45af2c684009, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.559493] env[69994]: DEBUG nova.compute.manager [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1040.560039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.560998] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea26b37-82ac-47c0-ab82-b4fd138fda41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.570185] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.574136] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be89cda6-14e2-4af2-850d-04c9a0802f08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.584385] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242385, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.803814} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.585957] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 384889a3-c3d9-4e0e-8d1c-95193cf4343d/384889a3-c3d9-4e0e-8d1c-95193cf4343d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1040.586957] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1040.586957] env[69994]: DEBUG oslo_vmware.api [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1040.586957] env[69994]: value = "task-3242386" [ 1040.586957] env[69994]: _type = "Task" [ 1040.586957] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.587206] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e89cad34-2e90-441f-bf4e-873455da8cbb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.606733] env[69994]: DEBUG oslo_vmware.api [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.608634] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1040.608634] env[69994]: value = "task-3242387" [ 1040.608634] env[69994]: _type = "Task" [ 1040.608634] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.619273] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242387, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.631890] env[69994]: DEBUG nova.network.neutron [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.699491] env[69994]: DEBUG nova.compute.utils [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1040.793980] env[69994]: DEBUG nova.network.neutron [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance_info_cache with network_info: [{"id": "92378003-993a-43f2-8823-55a4b83acdef", "address": "fa:16:3e:a4:fe:80", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92378003-99", "ovs_interfaceid": "92378003-993a-43f2-8823-55a4b83acdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.794727] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.967s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.797887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.279s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.798266] env[69994]: DEBUG nova.objects.instance [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lazy-loading 'resources' on Instance uuid 8f5a5852-cd78-434f-b413-3cc2314575bb {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.819520] env[69994]: INFO nova.scheduler.client.report [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Deleted allocations for instance 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e [ 1040.848293] env[69994]: DEBUG oslo_vmware.api [None 
req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526869af-cfd8-a31c-71ef-45af2c684009, 'name': SearchDatastore_Task, 'duration_secs': 0.014165} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.849125] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c46b251b-251f-4fc5-afc0-202e2a4c55ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.854561] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1040.854561] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52665aed-5474-caa3-a48c-35363aba30de" [ 1040.854561] env[69994]: _type = "Task" [ 1040.854561] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.862433] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52665aed-5474-caa3-a48c-35363aba30de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.909403] env[69994]: DEBUG nova.compute.manager [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Received event network-vif-plugged-92378003-993a-43f2-8823-55a4b83acdef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1040.909564] env[69994]: DEBUG oslo_concurrency.lockutils [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] Acquiring lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.909761] env[69994]: DEBUG oslo_concurrency.lockutils [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] Lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.909915] env[69994]: DEBUG oslo_concurrency.lockutils [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] Lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.910097] env[69994]: DEBUG nova.compute.manager [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] No waiting events found dispatching 
network-vif-plugged-92378003-993a-43f2-8823-55a4b83acdef {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1040.910332] env[69994]: WARNING nova.compute.manager [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Received unexpected event network-vif-plugged-92378003-993a-43f2-8823-55a4b83acdef for instance with vm_state building and task_state spawning. [ 1040.910503] env[69994]: DEBUG nova.compute.manager [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Received event network-changed-92378003-993a-43f2-8823-55a4b83acdef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1040.910821] env[69994]: DEBUG nova.compute.manager [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Refreshing instance network info cache due to event network-changed-92378003-993a-43f2-8823-55a4b83acdef. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1040.910821] env[69994]: DEBUG oslo_concurrency.lockutils [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] Acquiring lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.102838] env[69994]: DEBUG oslo_vmware.api [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242386, 'name': PowerOffVM_Task, 'duration_secs': 0.235066} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.103148] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1041.103336] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1041.103604] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-567ddd79-2e0f-44c1-99c1-f515cd8e0fd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.117326] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242387, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133759} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.117570] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1041.118320] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d3a306-ef7e-4a8d-9316-60b531ab1f4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.139603] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 384889a3-c3d9-4e0e-8d1c-95193cf4343d/384889a3-c3d9-4e0e-8d1c-95193cf4343d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.140162] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04e16586-3aa2-4aa3-b9d3-4ec8a074ed29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.159216] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1041.159216] env[69994]: value = "task-3242389" [ 1041.159216] env[69994]: _type = "Task" [ 1041.159216] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.167269] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242389, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.201781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.208369] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1041.208591] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1041.208776] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Deleting the datastore file [datastore2] 81bae584-e558-4f96-9696-2510fed5a2e0 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.209045] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-869f065f-e918-407c-a0ed-fbced15929e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.215946] env[69994]: DEBUG oslo_vmware.api [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for the task: (returnval){ [ 1041.215946] env[69994]: value = "task-3242390" [ 1041.215946] env[69994]: _type = "Task" [ 1041.215946] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.223797] env[69994]: DEBUG oslo_vmware.api [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242390, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.298830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.299288] env[69994]: DEBUG nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Instance network_info: |[{"id": "92378003-993a-43f2-8823-55a4b83acdef", "address": "fa:16:3e:a4:fe:80", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92378003-99", "ovs_interfaceid": "92378003-993a-43f2-8823-55a4b83acdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1041.299661] env[69994]: DEBUG oslo_concurrency.lockutils [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] Acquired lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.299869] env[69994]: DEBUG nova.network.neutron [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Refreshing network info cache for port 92378003-993a-43f2-8823-55a4b83acdef {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.301297] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:fe:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92378003-993a-43f2-8823-55a4b83acdef', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1041.310448] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.314324] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1041.315051] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d9be2eb-278a-4e92-88c9-b9c5dc45ac27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.334065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9c843595-559b-418c-976d-4ad9b9244a6e tempest-ServersNegativeTestJSON-2147077089 tempest-ServersNegativeTestJSON-2147077089-project-member] Lock "6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.673s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.339328] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1041.339328] env[69994]: value = "task-3242391" [ 1041.339328] env[69994]: _type = "Task" [ 1041.339328] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.348426] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242391, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.367354] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52665aed-5474-caa3-a48c-35363aba30de, 'name': SearchDatastore_Task, 'duration_secs': 0.011055} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.367792] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.367943] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 799bf051-86b4-45bd-b9bf-df767074dac8/799bf051-86b4-45bd-b9bf-df767074dac8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.368217] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16185a0b-6255-49f0-bc98-19bbe32966d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.374817] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1041.374817] env[69994]: value = "task-3242392" [ 1041.374817] env[69994]: _type = "Task" [ 1041.374817] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.386974] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242392, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.610091] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dc27db-c9a9-4e26-ad84-964a394ed604 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.621884] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a718a84-11eb-4579-9999-f8e9a5c20dce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.659964] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cfac96-99fb-4ebe-97c1-af915e2c99b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.676269] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc8703b-814a-467a-8769-076a92dd7cf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.680903] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242389, 'name': ReconfigVM_Task, 'duration_secs': 0.308041} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.681527] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 384889a3-c3d9-4e0e-8d1c-95193cf4343d/384889a3-c3d9-4e0e-8d1c-95193cf4343d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.682873] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-296258b0-4b69-48b6-b62b-204071a36f95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.693949] env[69994]: DEBUG nova.compute.provider_tree [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1041.702127] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1041.702127] env[69994]: value = "task-3242393" [ 1041.702127] env[69994]: _type = "Task" [ 1041.702127] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.713309] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242393, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.726515] env[69994]: DEBUG oslo_vmware.api [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Task: {'id': task-3242390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188655} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.726894] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.727158] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.727359] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.727560] env[69994]: INFO nova.compute.manager [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1041.729975] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.729975] env[69994]: DEBUG nova.compute.manager [-] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.729975] env[69994]: DEBUG nova.network.neutron [-] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.854883] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242391, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.887018] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242392, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.128150] env[69994]: DEBUG nova.network.neutron [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updated VIF entry in instance network info cache for port 92378003-993a-43f2-8823-55a4b83acdef. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.128564] env[69994]: DEBUG nova.network.neutron [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance_info_cache with network_info: [{"id": "92378003-993a-43f2-8823-55a4b83acdef", "address": "fa:16:3e:a4:fe:80", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92378003-99", "ovs_interfaceid": "92378003-993a-43f2-8823-55a4b83acdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.213460] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242393, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.217584] env[69994]: ERROR nova.scheduler.client.report [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [req-ffde3ffb-ab89-44b9-9714-f5c36232400f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ffde3ffb-ab89-44b9-9714-f5c36232400f"}]} [ 1042.239445] env[69994]: DEBUG nova.scheduler.client.report [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1042.256459] env[69994]: DEBUG nova.scheduler.client.report [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1042.256649] env[69994]: DEBUG nova.compute.provider_tree [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1042.269471] env[69994]: DEBUG nova.compute.manager [req-189d6140-cd51-43fb-9ed9-813d596c51d8 req-c49d539f-9aa8-4674-91bc-508aa3ed49f5 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received event network-vif-deleted-a696f1f6-260e-4d3b-a302-98dc2de9b949 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.269799] env[69994]: INFO nova.compute.manager [req-189d6140-cd51-43fb-9ed9-813d596c51d8 req-c49d539f-9aa8-4674-91bc-508aa3ed49f5 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Neutron deleted interface a696f1f6-260e-4d3b-a302-98dc2de9b949; detaching it from the instance and deleting it from the info cache [ 1042.269926] env[69994]: DEBUG nova.network.neutron [req-189d6140-cd51-43fb-9ed9-813d596c51d8 req-c49d539f-9aa8-4674-91bc-508aa3ed49f5 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Updating instance_info_cache with network_info: [{"id": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "address": "fa:16:3e:51:b7:6e", "network": {"id": "e93dab81-5f07-43a3-9679-60bc0c54bc79", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-5660800", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.147", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "978da68b62d8409da5d8c8a45cd985c0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d2742ba-c3af-4412-877d-c2811dfeba46", "external-id": "nsx-vlan-transportzone-390", "segmentation_id": 390, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3cb974c-43", "ovs_interfaceid": "f3cb974c-43b4-4fda-966b-aee04d3459a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.271867] env[69994]: DEBUG nova.scheduler.client.report [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1042.297490] env[69994]: DEBUG nova.scheduler.client.report [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1042.305493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.305493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.307920] env[69994]: INFO nova.compute.manager [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Attaching volume 42846572-fbb3-484d-863b-3efb63333e94 to /dev/sdc [ 1042.356652] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242391, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.358886] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90aafc7-4619-4771-92b2-bd15b0b25d56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.365333] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e4f9f9-f53d-4137-a544-9574c57b52cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.384310] env[69994]: DEBUG nova.virt.block_device [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updating existing volume attachment record: 3702831f-5f6b-4d64-bbea-3f1ea12447e6 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1042.392293] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242392, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552746} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.392553] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 799bf051-86b4-45bd-b9bf-df767074dac8/799bf051-86b4-45bd-b9bf-df767074dac8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.394300] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.394300] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7310c579-0b4a-45d0-9750-4392420c90be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.398780] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1042.398780] env[69994]: value = "task-3242394" [ 1042.398780] env[69994]: _type = "Task" [ 1042.398780] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.406472] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242394, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.631217] env[69994]: DEBUG oslo_concurrency.lockutils [req-06ff13ef-a3d2-4dbe-8729-71660ecb7198 req-0ef3114a-238b-4774-b37a-e8a72b7ed711 service nova] Releasing lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.644059] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c4a4e7-6b80-488b-b672-38fe290385d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.650659] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f843b5d-ebb3-45e9-be4b-47377bd68425 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.685766] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8317b4-0053-4357-b90e-c4b2272e789f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.693545] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6815f646-eb6a-4d30-a02f-84cdc851a6e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.707965] env[69994]: DEBUG nova.compute.provider_tree [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1042.717966] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242393, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.775747] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37930063-ed1e-4e54-965f-e0efb98cfb7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.786206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0747d891-c517-48a0-a943-1b46278e5cd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.821508] env[69994]: DEBUG nova.compute.manager [req-189d6140-cd51-43fb-9ed9-813d596c51d8 req-c49d539f-9aa8-4674-91bc-508aa3ed49f5 service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Detach interface failed, port_id=a696f1f6-260e-4d3b-a302-98dc2de9b949, reason: Instance 81bae584-e558-4f96-9696-2510fed5a2e0 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1042.852517] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242391, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.908632] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070003} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.908942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1042.909851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472fa476-f2b9-420f-bca1-a068a2ceee99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.935039] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 799bf051-86b4-45bd-b9bf-df767074dac8/799bf051-86b4-45bd-b9bf-df767074dac8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1042.935315] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2369cc92-189e-4b21-aea2-9c008d6e3b8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.953625] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1042.953625] env[69994]: value = "task-3242396" [ 1042.953625] env[69994]: _type = "Task" [ 1042.953625] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.961487] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242396, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.219298] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242393, 'name': Rename_Task, 'duration_secs': 1.369471} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.219562] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1043.219864] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6c014f8-acd5-4c01-be39-9bffbb815834 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.225846] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1043.225846] env[69994]: value = "task-3242397" [ 1043.225846] env[69994]: _type = "Task" [ 1043.225846] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.234399] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242397, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.255020] env[69994]: DEBUG nova.scheduler.client.report [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1043.255020] env[69994]: DEBUG nova.compute.provider_tree [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 120 to 121 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1043.255020] env[69994]: DEBUG nova.compute.provider_tree [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1043.310081] env[69994]: DEBUG nova.network.neutron [-] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.356669] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242391, 'name': CreateVM_Task, 'duration_secs': 1.538743} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.356884] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1043.357612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.357779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.358116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1043.358661] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ccbd7f1-b011-4538-9f02-804374f81d66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.363668] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1043.363668] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522d413f-5a17-aa21-8b78-7f28b295c1e2" [ 1043.363668] env[69994]: _type = "Task" [ 1043.363668] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.371416] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522d413f-5a17-aa21-8b78-7f28b295c1e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.464579] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242396, 'name': ReconfigVM_Task, 'duration_secs': 0.491254} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.466403] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 799bf051-86b4-45bd-b9bf-df767074dac8/799bf051-86b4-45bd-b9bf-df767074dac8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.467106] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0718aee3-17c3-4026-bc55-32da2f6a834f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.474553] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1043.474553] env[69994]: value = "task-3242398" [ 1043.474553] env[69994]: _type = "Task" [ 1043.474553] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.486698] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242398, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.740437] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242397, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.761666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.964s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.766067] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.895s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.769726] env[69994]: DEBUG nova.objects.instance [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lazy-loading 'resources' on Instance uuid 95b7d534-ac5b-4982-830d-bf65ecd610b3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.794202] env[69994]: INFO nova.scheduler.client.report [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted allocations for instance 8f5a5852-cd78-434f-b413-3cc2314575bb [ 1043.813187] env[69994]: INFO nova.compute.manager [-] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Took 2.08 seconds to deallocate network for instance. [ 1043.882547] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522d413f-5a17-aa21-8b78-7f28b295c1e2, 'name': SearchDatastore_Task, 'duration_secs': 0.014664} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.882953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.883210] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1043.883449] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.883598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.883781] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.884071] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75bcd4ec-5e81-4a35-90b3-1982906d8b5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.894736] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.895026] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1043.896207] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7a6bcc4-42e6-44aa-90b9-c821838e016c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.902746] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1043.902746] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5234bd65-127b-41dd-f7bc-9bcb0892bb3a" [ 1043.902746] env[69994]: _type = "Task" [ 1043.902746] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.913229] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5234bd65-127b-41dd-f7bc-9bcb0892bb3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.987257] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242398, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.236912] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242397, 'name': PowerOnVM_Task, 'duration_secs': 0.558595} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.237112] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1044.237317] env[69994]: INFO nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Took 10.08 seconds to spawn the instance on the hypervisor. 
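The ERROR/recovery sequence above under req-ffde3ffb-ab89-44b9-9714-f5c36232400f is placement's normal optimistic-concurrency behaviour rather than a real failure: the inventory PUT for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 carried a stale resource provider generation, placement answered 409 with code placement.concurrent_update, the report client refreshed inventories, aggregate associations and trait associations, then resubmitted, and the provider generation advanced from 120 to 121. The following is a minimal, hypothetical sketch of that generation-conflict retry loop, not Nova's actual report client; the endpoint URL, token and inventory payload are illustrative placeholders.

    # Sketch of generation-based optimistic concurrency against the Placement API.
    # On 409 placement.concurrent_update, re-read the provider's current generation
    # and inventory, then resubmit the PUT (as the log above shows Nova doing).
    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # hypothetical endpoint
    HEADERS = {
        "X-Auth-Token": "REDACTED",                         # hypothetical token
        "OpenStack-API-Version": "placement 1.26",
    }

    def put_inventory_with_retry(rp_uuid, inventories, max_attempts=3):
        url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_attempts):
            # Read the provider's current generation before each write attempt.
            current = requests.get(url, headers=HEADERS)
            current.raise_for_status()
            payload = {
                "resource_provider_generation":
                    current.json()["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409: another writer bumped the generation in the meantime
            # (in the log: 120 -> 121); refresh and try again.
        raise RuntimeError("gave up after repeated generation conflicts")

The same pattern explains why the DISK_GB max_unit briefly flips between 119 and 120 in the surrounding entries: each writer recomputes its view after refreshing, and the last successful PUT wins against the current generation.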
[ 1044.237491] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1044.238272] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cf3584-1012-44c2-9520-d1aa51d9608d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.305247] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dc371a11-3528-4c97-b2cc-64f6c55f3df4 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "8f5a5852-cd78-434f-b413-3cc2314575bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.737s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.320547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.414681] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5234bd65-127b-41dd-f7bc-9bcb0892bb3a, 'name': SearchDatastore_Task, 'duration_secs': 0.013185} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.418026] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceae9916-cef0-43c5-b07d-127478ad8190 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.423681] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1044.423681] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f381a3-a604-e45e-345d-805d4c2dab26" [ 1044.423681] env[69994]: _type = "Task" [ 1044.423681] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.432139] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f381a3-a604-e45e-345d-805d4c2dab26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.463208] env[69994]: DEBUG nova.compute.manager [req-0d81ef8d-0ca7-4e80-b832-2ea7eee35475 req-325f3a38-76c2-4633-8b27-a6216fb6df6e service nova] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Received event network-vif-deleted-f3cb974c-43b4-4fda-966b-aee04d3459a9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1044.488900] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242398, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.567063] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953aaa04-0e71-4534-a7a6-7adacdb36e67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.575937] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d28eb9d-746a-43a2-9b52-f953abf5ec27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.615830] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6613dfde-9ece-4de8-9b7d-0c892ec5cd5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.628462] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d9008e-332c-4804-ac1e-a5f230bdeb88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.647032] env[69994]: DEBUG nova.compute.provider_tree [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.754218] env[69994]: INFO nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Took 27.59 seconds to build instance. [ 1044.935029] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f381a3-a604-e45e-345d-805d4c2dab26, 'name': SearchDatastore_Task, 'duration_secs': 0.010999} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.935267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.935481] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ab99499b-21a2-465b-9975-4e0adb18df94/ab99499b-21a2-465b-9975-4e0adb18df94.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1044.935722] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8c10929-6fbf-4b09-95e7-fac25f8d6a59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.943067] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1044.943067] env[69994]: value = "task-3242400" [ 1044.943067] env[69994]: _type = "Task" [ 1044.943067] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.951402] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242400, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.985064] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242398, 'name': Rename_Task, 'duration_secs': 1.052698} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.985351] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.985613] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc90ecae-da33-4438-b5ec-24dc00a031aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.992418] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1044.992418] env[69994]: value = "task-3242401" [ 1044.992418] env[69994]: _type = "Task" [ 1044.992418] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.000971] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242401, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.147507] env[69994]: DEBUG nova.scheduler.client.report [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.256845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.108s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.308347] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.308757] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.454303] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242400, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.503746] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242401, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.652051] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.655046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.141s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.656787] env[69994]: INFO nova.compute.claims [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.688421] env[69994]: INFO nova.scheduler.client.report [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Deleted allocations for instance 95b7d534-ac5b-4982-830d-bf65ecd610b3 [ 1045.812125] env[69994]: DEBUG nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1045.953514] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527689} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.953817] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ab99499b-21a2-465b-9975-4e0adb18df94/ab99499b-21a2-465b-9975-4e0adb18df94.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1045.953993] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1045.954249] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64a20f9d-d11f-4091-abc0-bea76f348475 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.960613] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1045.960613] env[69994]: value = "task-3242402" [ 1045.960613] env[69994]: _type = "Task" [ 1045.960613] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.970230] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242402, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.003749] env[69994]: DEBUG oslo_vmware.api [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242401, 'name': PowerOnVM_Task, 'duration_secs': 0.632152} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.003749] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1046.003749] env[69994]: INFO nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Took 9.47 seconds to spawn the instance on the hypervisor. 
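[editor's note] The lockutils lines in this section ('Acquiring lock "..." by "..."', 'acquired ... :: waited N s', 'released ... :: held N s') record how long each caller waited for and then held a named lock such as "compute_resources". Below is a small sketch of that instrumentation pattern under stated assumptions: a plain threading.Lock registry and a context manager with hypothetical names, not the oslo.concurrency implementation.

```python
# Illustrative sketch, not oslo.concurrency: a named-lock context manager that
# logs wait and hold durations, matching the "waited N s / held N s" entries.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    """Return (creating on first use) the process-wide lock for `name`."""
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, owner):
    lock = _get_lock(name)
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)  # simulated critical section
```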
[ 1046.003749] env[69994]: DEBUG nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1046.004509] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9510cdbc-9bc1-44ff-9049-a54df55f2a63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.199081] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04ace5a3-ec54-4fb8-a912-98bbd37a18e3 tempest-ServerAddressesTestJSON-1737865411 tempest-ServerAddressesTestJSON-1737865411-project-member] Lock "95b7d534-ac5b-4982-830d-bf65ecd610b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.872s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.332273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.471212] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242402, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062471} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.471579] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.473632] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dc43ad-3639-41ad-bb0f-2388f2c76246 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.499103] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] ab99499b-21a2-465b-9975-4e0adb18df94/ab99499b-21a2-465b-9975-4e0adb18df94.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.499898] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00c32a3b-ec8e-4d87-9d39-02415a7d8fe6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.524032] env[69994]: INFO nova.compute.manager [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Took 29.33 seconds to build instance. [ 1046.525561] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1046.525561] env[69994]: value = "task-3242403" [ 1046.525561] env[69994]: _type = "Task" [ 1046.525561] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.533612] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242403, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.938965] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1046.938965] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647991', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'name': 'volume-42846572-fbb3-484d-863b-3efb63333e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'serial': '42846572-fbb3-484d-863b-3efb63333e94'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1046.940551] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11df952f-2f34-499c-8cec-dd7591c01385 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.947907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e2dae5-cba5-42b0-88c4-b50675128762 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.979630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6699ef-d61e-48f7-a7de-b94806b1704c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.985980] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96b9c56-5f96-49eb-8fc2-327209243782 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.073744] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] volume-42846572-fbb3-484d-863b-3efb63333e94/volume-42846572-fbb3-484d-863b-3efb63333e94.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.074391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-11df5b1b-4cb7-4f11-a47f-a490dfb02e65 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "799bf051-86b4-45bd-b9bf-df767074dac8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.889s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.075696] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10aca5f7-13af-4dac-85ee-b29c2f4dc3fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.102654] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d231199-bbb3-4c21-b968-4a8ecc3f3536 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.114457] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.119162] env[69994]: DEBUG oslo_vmware.api [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1047.119162] env[69994]: value = "task-3242404" [ 1047.119162] env[69994]: _type = "Task" [ 1047.119162] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.121707] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3136786d-dd51-4760-8df4-cd5b2b02f1a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.148166] env[69994]: DEBUG nova.compute.provider_tree [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1047.157687] env[69994]: DEBUG oslo_vmware.api [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242404, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.585205] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242403, 'name': ReconfigVM_Task, 'duration_secs': 1.047567} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.585502] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Reconfigured VM instance instance-0000005a to attach disk [datastore1] ab99499b-21a2-465b-9975-4e0adb18df94/ab99499b-21a2-465b-9975-4e0adb18df94.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.586166] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93606336-babc-4d96-a23b-78dbcf0c4872 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.591992] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1047.591992] env[69994]: value = "task-3242405" [ 1047.591992] env[69994]: _type = "Task" [ 1047.591992] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.600641] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242405, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.628916] env[69994]: DEBUG oslo_vmware.api [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242404, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.680609] env[69994]: ERROR nova.scheduler.client.report [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [req-68019f61-84c8-4b4f-9099-2389627156d6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-68019f61-84c8-4b4f-9099-2389627156d6"}]} [ 1047.699470] env[69994]: DEBUG nova.scheduler.client.report [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1047.715997] env[69994]: DEBUG nova.scheduler.client.report [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1047.716239] env[69994]: DEBUG nova.compute.provider_tree [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1047.729133] env[69994]: DEBUG nova.scheduler.client.report [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1047.748761] env[69994]: DEBUG nova.scheduler.client.report [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1047.875080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.875080] env[69994]: DEBUG oslo_concurrency.lockutils 
[None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.875344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.875595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.875781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.880249] env[69994]: INFO nova.compute.manager [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Terminating instance [ 1048.038298] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0900d589-eef6-4677-a2d6-84c3b56aecf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.046584] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc28c06-93a1-4d8e-b7aa-5f7a6e8a2684 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.081102] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a519bc-8268-4884-887e-2165756dddc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.090404] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98b2897-7263-4d94-99ea-7f1456436fcc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.111936] env[69994]: DEBUG nova.compute.provider_tree [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1048.116228] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242405, 'name': Rename_Task, 'duration_secs': 0.272176} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.116488] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.116731] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7eca0e87-d53c-4e98-be25-83f34d21d88c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.125717] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1048.125717] env[69994]: value = "task-3242406" [ 1048.125717] env[69994]: _type = "Task" [ 1048.125717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.134906] env[69994]: DEBUG oslo_vmware.api [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242404, 'name': ReconfigVM_Task, 'duration_secs': 0.78238} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.135266] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfigured VM instance instance-0000004b to attach disk [datastore2] volume-42846572-fbb3-484d-863b-3efb63333e94/volume-42846572-fbb3-484d-863b-3efb63333e94.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.141255] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8fbe31a-787a-4c70-ae1c-01501771d8c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.157584] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242406, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.162884] env[69994]: DEBUG oslo_vmware.api [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1048.162884] env[69994]: value = "task-3242407" [ 1048.162884] env[69994]: _type = "Task" [ 1048.162884] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.172106] env[69994]: DEBUG oslo_vmware.api [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.388777] env[69994]: DEBUG nova.compute.manager [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.389104] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.390047] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcc535f-0686-4fb5-b1b8-b1ff8857dd30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.397990] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.398285] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9015a76-39ba-491d-9ff4-0a2bf2c0c68a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.405182] env[69994]: DEBUG oslo_vmware.api [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1048.405182] env[69994]: value = "task-3242408" [ 1048.405182] env[69994]: _type = "Task" [ 1048.405182] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.416691] env[69994]: DEBUG oslo_vmware.api [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242408, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.637334] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242406, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.656028] env[69994]: DEBUG nova.scheduler.client.report [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1048.656245] env[69994]: DEBUG nova.compute.provider_tree [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 122 to 123 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1048.656283] env[69994]: DEBUG nova.compute.provider_tree [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1048.672189] env[69994]: DEBUG oslo_vmware.api [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242407, 'name': ReconfigVM_Task, 'duration_secs': 0.198649} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.672394] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647991', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'name': 'volume-42846572-fbb3-484d-863b-3efb63333e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'serial': '42846572-fbb3-484d-863b-3efb63333e94'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1048.915172] env[69994]: DEBUG oslo_vmware.api [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242408, 'name': PowerOffVM_Task, 'duration_secs': 0.180893} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.915445] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.915620] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.915870] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69e63c80-9f47-46a8-870a-c302b3652586 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.005474] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.005811] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.006098] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleting the datastore file [datastore1] 071151e4-a3ee-4a89-8b83-19bef3fb7d3e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.006471] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d99ff60-f3e4-42c4-b675-be20a8078209 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.020425] env[69994]: DEBUG oslo_vmware.api [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1049.020425] env[69994]: value = "task-3242410" [ 1049.020425] env[69994]: _type = "Task" [ 1049.020425] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.038272] env[69994]: DEBUG oslo_vmware.api [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.136914] env[69994]: DEBUG oslo_vmware.api [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242406, 'name': PowerOnVM_Task, 'duration_secs': 0.53406} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.137195] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.137402] env[69994]: INFO nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Took 10.19 seconds to spawn the instance on the hypervisor. [ 1049.137749] env[69994]: DEBUG nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.138376] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618410a6-f979-469c-99f4-02a516ccafa1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.161888] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.507s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.162094] env[69994]: DEBUG nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1049.164593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.410s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.164816] env[69994]: DEBUG nova.objects.instance [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lazy-loading 'resources' on Instance uuid 290e8749-6860-4303-b966-65d2efee5499 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.531169] env[69994]: DEBUG oslo_vmware.api [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242410, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.655033] env[69994]: INFO nova.compute.manager [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Took 30.43 seconds to build instance. [ 1049.667998] env[69994]: DEBUG nova.compute.utils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1049.669770] env[69994]: DEBUG nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1049.669770] env[69994]: DEBUG nova.network.neutron [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1049.710790] env[69994]: DEBUG nova.objects.instance [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 29071eb9-6334-4c23-acb4-142c12aa448d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.716023] env[69994]: DEBUG nova.policy [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '761ebe718b0f48939612e82c6b1e6766', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4c158f7555d4606b641be4264d95eaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1049.923188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1eb288-6d35-4f10-b623-8e37c31f85b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.931616] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657a4e46-7443-4d9b-94b4-738226743cb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.962532] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21fe4e8-1af6-4fa6-b9d7-ff78e8e55d2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.969679] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f35280a-8511-43f3-8692-19274cd83162 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.983776] env[69994]: DEBUG nova.compute.provider_tree [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.989030] env[69994]: DEBUG nova.network.neutron [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Successfully created port: 624ca4e4-692a-43e2-a267-b5692e21bb35 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1050.030843] env[69994]: DEBUG oslo_vmware.api [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: 
{'id': task-3242410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.572324} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.034039] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.034039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.034039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.034039] env[69994]: INFO nova.compute.manager [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1050.034039] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.034039] env[69994]: DEBUG nova.compute.manager [-] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.034039] env[69994]: DEBUG nova.network.neutron [-] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.156816] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0ecaabea-091b-4a6d-a1db-1a3397d3fcca tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.942s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.176032] env[69994]: DEBUG nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1050.221286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86a3841a-c76e-47d9-a1ae-2ebb3bc91c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.914s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.348381] env[69994]: DEBUG nova.compute.manager [req-1f68467c-e949-418c-ba59-90c2983d9c71 req-91a09a76-b9d8-42c2-b167-5347c95afa8d service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Received event network-vif-deleted-8c465e61-001d-4355-b0db-ac3a4e1aeec6 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.348381] env[69994]: INFO nova.compute.manager [req-1f68467c-e949-418c-ba59-90c2983d9c71 req-91a09a76-b9d8-42c2-b167-5347c95afa8d service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Neutron deleted interface 8c465e61-001d-4355-b0db-ac3a4e1aeec6; detaching it from the instance and deleting it from the info cache [ 1050.348381] env[69994]: DEBUG nova.network.neutron [req-1f68467c-e949-418c-ba59-90c2983d9c71 req-91a09a76-b9d8-42c2-b167-5347c95afa8d service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.487511] env[69994]: DEBUG nova.scheduler.client.report [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1050.806642] env[69994]: DEBUG nova.network.neutron [-] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.851953] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c58934cc-054b-47c1-aefd-73c519a7c674 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.861748] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3f11c4-de19-4cc3-9ead-deffd3ed26fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.894067] env[69994]: DEBUG nova.compute.manager [req-1f68467c-e949-418c-ba59-90c2983d9c71 req-91a09a76-b9d8-42c2-b167-5347c95afa8d service nova] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Detach interface failed, port_id=8c465e61-001d-4355-b0db-ac3a4e1aeec6, reason: Instance 071151e4-a3ee-4a89-8b83-19bef3fb7d3e could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1050.992864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.995389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.098s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.995623] env[69994]: DEBUG nova.objects.instance [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'resources' on Instance uuid 47e80abc-2f7e-432c-bd2f-3064841401fc {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.012949] env[69994]: INFO nova.scheduler.client.report [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted allocations for instance 290e8749-6860-4303-b966-65d2efee5499 [ 1051.016561] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.016850] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.185637] env[69994]: DEBUG nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1051.212686] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1051.212929] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.213099] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1051.213284] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.213452] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1051.213638] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1051.213852] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1051.214024] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1051.214202] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 
tempest-ImagesTestJSON-1478781762-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1051.214366] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1051.214570] env[69994]: DEBUG nova.virt.hardware [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1051.215441] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd6a8bb-44f3-4ac8-a184-e3bd8764d9e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.223658] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634d4222-e66f-4f5c-a477-d2ffb928fdbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.309034] env[69994]: INFO nova.compute.manager [-] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Took 1.28 seconds to deallocate network for instance. [ 1051.397122] env[69994]: DEBUG nova.compute.manager [req-421b0029-5591-406c-b6a1-0016ee92a700 req-4b3a18c9-6af1-404d-b74e-eace78e48c57 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Received event network-vif-plugged-624ca4e4-692a-43e2-a267-b5692e21bb35 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.397122] env[69994]: DEBUG oslo_concurrency.lockutils [req-421b0029-5591-406c-b6a1-0016ee92a700 req-4b3a18c9-6af1-404d-b74e-eace78e48c57 service nova] Acquiring lock "f00662a9-92e0-4520-9ced-3cfd6e83628b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.397122] env[69994]: DEBUG oslo_concurrency.lockutils [req-421b0029-5591-406c-b6a1-0016ee92a700 req-4b3a18c9-6af1-404d-b74e-eace78e48c57 service nova] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.397122] env[69994]: DEBUG oslo_concurrency.lockutils [req-421b0029-5591-406c-b6a1-0016ee92a700 req-4b3a18c9-6af1-404d-b74e-eace78e48c57 service nova] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.397122] env[69994]: DEBUG nova.compute.manager [req-421b0029-5591-406c-b6a1-0016ee92a700 req-4b3a18c9-6af1-404d-b74e-eace78e48c57 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] No waiting events found dispatching network-vif-plugged-624ca4e4-692a-43e2-a267-b5692e21bb35 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1051.397122] env[69994]: WARNING 
nova.compute.manager [req-421b0029-5591-406c-b6a1-0016ee92a700 req-4b3a18c9-6af1-404d-b74e-eace78e48c57 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Received unexpected event network-vif-plugged-624ca4e4-692a-43e2-a267-b5692e21bb35 for instance with vm_state building and task_state spawning. [ 1051.495956] env[69994]: DEBUG nova.network.neutron [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Successfully updated port: 624ca4e4-692a-43e2-a267-b5692e21bb35 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1051.522523] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2e783b19-7a3b-414c-a4fa-047ad5afedf3 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "290e8749-6860-4303-b966-65d2efee5499" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.521s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.523789] env[69994]: INFO nova.compute.manager [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Detaching volume a81b55d5-7771-408e-8f27-6b9ddfd1b4d9 [ 1051.559252] env[69994]: INFO nova.virt.block_device [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Attempting to driver detach volume a81b55d5-7771-408e-8f27-6b9ddfd1b4d9 from mountpoint /dev/sdb [ 1051.559496] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1051.559686] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647983', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'name': 'volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'serial': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1051.560636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a65140-407d-4b2c-a61e-f10745ea88b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.590445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899e9638-b3dc-4e26-99bb-e5b752d7ea7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.600110] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a09271-09e8-4d52-a197-dacce69a7c6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.605085] env[69994]: DEBUG nova.compute.manager [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1051.633642] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbb2a1e-1ba3-49aa-bc21-7324b9411139 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.649402] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] The volume has not been displaced from its original location: [datastore1] volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9/volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1051.654720] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfiguring VM instance instance-0000004b to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1051.657478] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a283a4f7-360c-4202-a501-0810ec91fe28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.676497] env[69994]: DEBUG oslo_vmware.api [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1051.676497] env[69994]: value = "task-3242411" [ 1051.676497] env[69994]: _type = "Task" [ 1051.676497] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.686771] env[69994]: DEBUG oslo_vmware.api [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242411, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.791413] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9eedb1-116a-4042-b250-97ebbf248cfb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.798594] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de705e32-6677-4d0b-83f2-ae12fc5c60aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.828442] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.829398] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440bf1e1-026e-476e-b772-f2813b89d985 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.836715] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9665839-c119-4bbf-8a84-209a0177323f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.851373] env[69994]: DEBUG nova.compute.provider_tree [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.001208] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "refresh_cache-f00662a9-92e0-4520-9ced-3cfd6e83628b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.001393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "refresh_cache-f00662a9-92e0-4520-9ced-3cfd6e83628b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.001548] env[69994]: DEBUG nova.network.neutron [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.122783] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.187055] env[69994]: DEBUG oslo_vmware.api [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242411, 'name': ReconfigVM_Task, 'duration_secs': 0.236124} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.187371] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfigured VM instance instance-0000004b to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1052.192562] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50dae84e-a43c-4a13-8c58-b3fcf3509fab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.208756] env[69994]: DEBUG oslo_vmware.api [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1052.208756] env[69994]: value = "task-3242412" [ 1052.208756] env[69994]: _type = "Task" [ 1052.208756] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.216800] env[69994]: DEBUG oslo_vmware.api [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242412, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.354900] env[69994]: DEBUG nova.scheduler.client.report [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.539049] env[69994]: DEBUG nova.network.neutron [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1052.670446] env[69994]: DEBUG nova.network.neutron [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Updating instance_info_cache with network_info: [{"id": "624ca4e4-692a-43e2-a267-b5692e21bb35", "address": "fa:16:3e:d5:88:45", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap624ca4e4-69", "ovs_interfaceid": "624ca4e4-692a-43e2-a267-b5692e21bb35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.719953] env[69994]: DEBUG oslo_vmware.api [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242412, 'name': ReconfigVM_Task, 'duration_secs': 0.1514} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.719953] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647983', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'name': 'volume-a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9', 'serial': 'a81b55d5-7771-408e-8f27-6b9ddfd1b4d9'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1052.859839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.864s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.862393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.169s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.863854] env[69994]: INFO nova.compute.claims [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.885260] env[69994]: INFO nova.scheduler.client.report [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted allocations for instance 47e80abc-2f7e-432c-bd2f-3064841401fc [ 1052.963515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock "850930f9-d5fb-4546-9796-30e164a1cdd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.963777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.963983] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock 
"850930f9-d5fb-4546-9796-30e164a1cdd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.964186] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.964357] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.966715] env[69994]: INFO nova.compute.manager [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Terminating instance [ 1053.173540] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "refresh_cache-f00662a9-92e0-4520-9ced-3cfd6e83628b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.173906] env[69994]: DEBUG nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Instance network_info: |[{"id": "624ca4e4-692a-43e2-a267-b5692e21bb35", "address": "fa:16:3e:d5:88:45", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap624ca4e4-69", "ovs_interfaceid": "624ca4e4-692a-43e2-a267-b5692e21bb35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1053.174379] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:88:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '624ca4e4-692a-43e2-a267-b5692e21bb35', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1053.182324] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1053.182512] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1053.183183] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b78a3113-6041-4fae-b166-825839a605d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.203172] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1053.203172] env[69994]: value = "task-3242413" [ 1053.203172] env[69994]: _type = "Task" [ 1053.203172] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.211863] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242413, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.262995] env[69994]: DEBUG nova.objects.instance [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 29071eb9-6334-4c23-acb4-142c12aa448d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.393282] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c56e45b0-111d-4f2f-bfd5-f64a40d17ec3 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "47e80abc-2f7e-432c-bd2f-3064841401fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.366s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.422212] env[69994]: DEBUG nova.compute.manager [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Received event network-changed-624ca4e4-692a-43e2-a267-b5692e21bb35 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.422488] env[69994]: DEBUG nova.compute.manager [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Refreshing instance network info cache due to event network-changed-624ca4e4-692a-43e2-a267-b5692e21bb35. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1053.422716] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] Acquiring lock "refresh_cache-f00662a9-92e0-4520-9ced-3cfd6e83628b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.422859] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] Acquired lock "refresh_cache-f00662a9-92e0-4520-9ced-3cfd6e83628b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.423040] env[69994]: DEBUG nova.network.neutron [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Refreshing network info cache for port 624ca4e4-692a-43e2-a267-b5692e21bb35 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1053.470687] env[69994]: DEBUG nova.compute.manager [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1053.470930] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1053.471975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5fafba-85d1-4f63-aa15-65b413b0942a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.480282] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1053.480603] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99eab240-8de5-4326-a285-fe8a211e6744 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.486466] env[69994]: DEBUG oslo_vmware.api [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 1053.486466] env[69994]: value = "task-3242414" [ 1053.486466] env[69994]: _type = "Task" [ 1053.486466] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.495938] env[69994]: DEBUG oslo_vmware.api [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242414, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.713761] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242413, 'name': CreateVM_Task, 'duration_secs': 0.293282} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.714954] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1053.714954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.715100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.715448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1053.715709] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32f01664-8d91-4df5-883b-ed27a3c10f5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.720525] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1053.720525] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5280c05d-638b-a864-606e-3b7664987d23" [ 1053.720525] env[69994]: _type = "Task" [ 1053.720525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.728898] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5280c05d-638b-a864-606e-3b7664987d23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.999995] env[69994]: DEBUG oslo_vmware.api [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242414, 'name': PowerOffVM_Task, 'duration_secs': 0.21349} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.000379] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1054.000454] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1054.000976] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c155082-2230-4768-9923-69e2c68e7392 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.066724] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1054.066946] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1054.067148] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Deleting the datastore file [datastore1] 850930f9-d5fb-4546-9796-30e164a1cdd3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.067473] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cf12d05-d4e0-4c8b-9874-8c8af0a546a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.074214] env[69994]: DEBUG oslo_vmware.api [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for the task: (returnval){ [ 1054.074214] env[69994]: value = "task-3242416" [ 1054.074214] env[69994]: _type = "Task" [ 1054.074214] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.081670] env[69994]: DEBUG oslo_vmware.api [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242416, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.122473] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbce5ea-412f-439a-838e-9709cd53869a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.130063] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a450e31d-283d-48ab-a69e-8deb6cbcd581 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.162893] env[69994]: DEBUG nova.network.neutron [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Updated VIF entry in instance network info cache for port 624ca4e4-692a-43e2-a267-b5692e21bb35. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1054.162893] env[69994]: DEBUG nova.network.neutron [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Updating instance_info_cache with network_info: [{"id": "624ca4e4-692a-43e2-a267-b5692e21bb35", "address": "fa:16:3e:d5:88:45", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap624ca4e4-69", "ovs_interfaceid": "624ca4e4-692a-43e2-a267-b5692e21bb35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.164701] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd17144b-feaa-4e88-9a68-785d18ec0706 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.173569] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019601e8-5a62-4847-8b71-7fded1012957 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.188128] env[69994]: DEBUG nova.compute.provider_tree [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.230181] env[69994]: DEBUG oslo_vmware.api [None 
req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5280c05d-638b-a864-606e-3b7664987d23, 'name': SearchDatastore_Task, 'duration_secs': 0.009707} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.230502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.230689] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1054.230925] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.231083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.231259] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.231503] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6765ea6f-bd1a-42c9-8271-b3984fd9b16d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.238160] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.238335] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1054.238993] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9300161-fe1f-4b3d-9025-7577e8ad9d6d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.244013] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1054.244013] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a8aa6e-a250-5257-08d9-deb48ae6e3e0" [ 1054.244013] env[69994]: _type = "Task" [ 1054.244013] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.251280] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a8aa6e-a250-5257-08d9-deb48ae6e3e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.273184] env[69994]: DEBUG oslo_concurrency.lockutils [None req-dbabac07-219f-4ef0-bf8a-f8e9d0a541a0 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.256s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.347637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.347637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.587664] env[69994]: DEBUG oslo_vmware.api [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Task: {'id': task-3242416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14154} completed successfully.
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.587920] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.588115] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1054.588306] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1054.588699] env[69994]: INFO nova.compute.manager [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1054.588776] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1054.589017] env[69994]: DEBUG nova.compute.manager [-] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1054.589128] env[69994]: DEBUG nova.network.neutron [-] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1054.634808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.635084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.668199] env[69994]: DEBUG oslo_concurrency.lockutils [req-f8dd710f-b3e8-4c40-9dcc-8b9155689479 req-b9d2cc83-7cce-410b-bb39-a0389dc31f37 service nova] Releasing lock "refresh_cache-f00662a9-92e0-4520-9ced-3cfd6e83628b" {{(pid=69994) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.690831] env[69994]: DEBUG nova.scheduler.client.report [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.754589] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a8aa6e-a250-5257-08d9-deb48ae6e3e0, 'name': SearchDatastore_Task, 'duration_secs': 0.007946} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.755386] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51fe1ccc-f589-449a-899e-54210fd449f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.760993] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1054.760993] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c4021c-830e-f2f0-c679-7f21ed73ae42" [ 1054.760993] env[69994]: _type = "Task" [ 1054.760993] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.768969] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c4021c-830e-f2f0-c679-7f21ed73ae42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.849606] env[69994]: INFO nova.compute.manager [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Detaching volume 42846572-fbb3-484d-863b-3efb63333e94 [ 1054.885258] env[69994]: INFO nova.virt.block_device [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Attempting to driver detach volume 42846572-fbb3-484d-863b-3efb63333e94 from mountpoint /dev/sdc [ 1054.885535] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1054.885737] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647991', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'name': 'volume-42846572-fbb3-484d-863b-3efb63333e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'serial': '42846572-fbb3-484d-863b-3efb63333e94'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1054.886694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1758bcf5-0b06-405b-a193-887ad812dc3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.912578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052910f0-9449-4068-a665-e5bc4d8c3219 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.919483] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0091f89-cc26-40a2-a446-84c28813c441 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.940500] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82125751-1350-4b22-8e67-bb3e91b607bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.957600] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] The volume has not been displaced from its original location: [datastore2] volume-42846572-fbb3-484d-863b-3efb63333e94/volume-42846572-fbb3-484d-863b-3efb63333e94.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1054.962922] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfiguring VM instance instance-0000004b to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1054.963310] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fd5e3a1-541d-4755-9507-2f0ddbc3bc4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.983917] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1054.983917] env[69994]: value = "task-3242417" [ 1054.983917] env[69994]: _type = "Task" [ 1054.983917] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.991966] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242417, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.047340] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.047340] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.138193] env[69994]: DEBUG nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Starting instance...
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1055.195902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.195902] env[69994]: DEBUG nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1055.198299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.878s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.198545] env[69994]: DEBUG nova.objects.instance [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lazy-loading 'resources' on Instance uuid 81bae584-e558-4f96-9696-2510fed5a2e0 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.271147] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c4021c-830e-f2f0-c679-7f21ed73ae42, 'name': SearchDatastore_Task, 'duration_secs': 0.011849} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.271426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.271729] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] f00662a9-92e0-4520-9ced-3cfd6e83628b/f00662a9-92e0-4520-9ced-3cfd6e83628b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1055.271991] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d78ff6c-7a5a-43f0-a6cc-b62a648a5a16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.277966] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1055.277966] env[69994]: value = "task-3242418" [ 1055.277966] env[69994]: _type = "Task" [ 1055.277966] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.287671] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242418, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.465865] env[69994]: DEBUG nova.compute.manager [req-487dfb7d-e9e0-4692-867a-76fc99f3c88a req-accc3add-358d-43f9-9771-8e8882df6927 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Received event network-vif-deleted-d4a8692e-1b97-42dd-a02a-53c07d85ad0a {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1055.466113] env[69994]: INFO nova.compute.manager [req-487dfb7d-e9e0-4692-867a-76fc99f3c88a req-accc3add-358d-43f9-9771-8e8882df6927 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Neutron deleted interface d4a8692e-1b97-42dd-a02a-53c07d85ad0a; detaching it from the instance and deleting it from the info cache [ 1055.466256] env[69994]: DEBUG nova.network.neutron [req-487dfb7d-e9e0-4692-867a-76fc99f3c88a req-accc3add-358d-43f9-9771-8e8882df6927 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.493403] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242417, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.548561] env[69994]: DEBUG nova.compute.utils [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1055.572813] env[69994]: DEBUG nova.network.neutron [-] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.668069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.701894] env[69994]: DEBUG nova.compute.utils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1055.703875] env[69994]: DEBUG nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1055.704071] env[69994]: DEBUG nova.network.neutron [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1055.769276] env[69994]: DEBUG nova.policy [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64b979ffffc94e09bf911bdb89f4796a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccb64f97e46a4e499df974959db53dcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1055.787433] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242418, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.970705] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da3bf60b-22bc-4951-8325-9304db11f3dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.979539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a903fa6b-fb16-4d5a-af14-1a2f8d696fb8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.000925] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242417, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.003393] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85f3611-fc48-47b8-a664-d3d83f64177a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.015820] env[69994]: DEBUG nova.compute.manager [req-487dfb7d-e9e0-4692-867a-76fc99f3c88a req-accc3add-358d-43f9-9771-8e8882df6927 service nova] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Detach interface failed, port_id=d4a8692e-1b97-42dd-a02a-53c07d85ad0a, reason: Instance 850930f9-d5fb-4546-9796-30e164a1cdd3 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1056.020279] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f962328-56d5-4188-a845-df85bb41f8b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.050632] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f95afb-d212-4226-a91a-89a1cd3a1189 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.053501] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.008s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.058213] env[69994]: DEBUG nova.network.neutron [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Successfully created port: 49963331-a486-495f-a065-cbcd2c380941 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.060967] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76dc1963-f5a0-422e-80de-9521b5256887 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.074327] env[69994]: DEBUG nova.compute.provider_tree [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508
tempest-ServersTestMultiNic-89683508-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.079017] env[69994]: INFO nova.compute.manager [-] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Took 1.49 seconds to deallocate network for instance. [ 1056.214783] env[69994]: DEBUG nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1056.290562] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242418, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.506086] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242417, 'name': ReconfigVM_Task, 'duration_secs': 1.288502} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.506589] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Reconfigured VM instance instance-0000004b to detach disk 2002 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1056.514336] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac445679-1ba1-4df3-84fa-0b97c0ada421 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.535016] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1056.535016] env[69994]: value = "task-3242419" [ 1056.535016] env[69994]: _type = "Task" [ 1056.535016] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.544633] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242419, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.578397] env[69994]: DEBUG nova.scheduler.client.report [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.584681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.794021] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242418, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.31204} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.794021] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] f00662a9-92e0-4520-9ced-3cfd6e83628b/f00662a9-92e0-4520-9ced-3cfd6e83628b.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1056.794021] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1056.794308] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa0b78ba-627a-40ab-b4e8-7a0d9eb7c2b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.800934] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1056.800934] env[69994]: value = "task-3242420" [ 1056.800934] env[69994]: _type = "Task" [ 1056.800934] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.809906] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242420, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.045712] env[69994]: DEBUG oslo_vmware.api [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242419, 'name': ReconfigVM_Task, 'duration_secs': 0.151911} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.045945] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647991', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'name': 'volume-42846572-fbb3-484d-863b-3efb63333e94', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29071eb9-6334-4c23-acb4-142c12aa448d', 'attached_at': '', 'detached_at': '', 'volume_id': '42846572-fbb3-484d-863b-3efb63333e94', 'serial': '42846572-fbb3-484d-863b-3efb63333e94'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1057.086151] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.088702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.756s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.090346] env[69994]: INFO nova.compute.claims [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.111347] env[69994]: INFO nova.scheduler.client.report [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Deleted allocations for instance 81bae584-e558-4f96-9696-2510fed5a2e0 [ 1057.120358] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.120662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" acquired by
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.120899] env[69994]: INFO nova.compute.manager [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Attaching volume 15a9e42e-3dfd-43a3-9ef9-92e636edfdbc to /dev/sdb [ 1057.152734] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb389b2a-5094-4994-882d-9a53509a50e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.160893] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be35ad9d-7098-41fc-805b-2432203bcc4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.175049] env[69994]: DEBUG nova.virt.block_device [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating existing volume attachment record: 43c5328a-6fa8-4dd4-9549-1a91f3a9cd87 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1057.224155] env[69994]: DEBUG nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1057.250660] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1057.250920] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.251485] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1057.251485] 
env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.251485] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1057.251633] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1057.251888] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1057.252082] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1057.252260] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1057.252428] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1057.252603] env[69994]: DEBUG nova.virt.hardware [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1057.253488] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04255d8c-0105-44e4-8aa3-e0d332c2ed57 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.261115] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cd4424-919e-469d-9deb-02240d7a972c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.309256] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242420, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073543} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.309542] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.310315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378f65d6-1b68-46ca-b8da-d24a56483073 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.332335] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] f00662a9-92e0-4520-9ced-3cfd6e83628b/f00662a9-92e0-4520-9ced-3cfd6e83628b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.332626] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-549026f6-c421-4998-aba0-2f093afe45b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.351602] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1057.351602] env[69994]: value = "task-3242422" [ 1057.351602] env[69994]: _type = "Task" [ 1057.351602] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.359056] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242422, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.494646] env[69994]: DEBUG nova.compute.manager [req-dad12561-0500-4433-863f-374433cfe39f req-c761f5f9-a2c5-4c72-8f75-3aebcb7f730f service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Received event network-vif-plugged-49963331-a486-495f-a065-cbcd2c380941 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1057.494893] env[69994]: DEBUG oslo_concurrency.lockutils [req-dad12561-0500-4433-863f-374433cfe39f req-c761f5f9-a2c5-4c72-8f75-3aebcb7f730f service nova] Acquiring lock "fc31da72-d09e-415e-9866-3e7fc91fec79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.495134] env[69994]: DEBUG oslo_concurrency.lockutils [req-dad12561-0500-4433-863f-374433cfe39f req-c761f5f9-a2c5-4c72-8f75-3aebcb7f730f service nova] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.495444] env[69994]: DEBUG oslo_concurrency.lockutils [req-dad12561-0500-4433-863f-374433cfe39f req-c761f5f9-a2c5-4c72-8f75-3aebcb7f730f service nova] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.495513] env[69994]: DEBUG nova.compute.manager [req-dad12561-0500-4433-863f-374433cfe39f req-c761f5f9-a2c5-4c72-8f75-3aebcb7f730f service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] No waiting events found dispatching network-vif-plugged-49963331-a486-495f-a065-cbcd2c380941 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1057.495629] env[69994]: WARNING nova.compute.manager [req-dad12561-0500-4433-863f-374433cfe39f req-c761f5f9-a2c5-4c72-8f75-3aebcb7f730f service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Received unexpected event network-vif-plugged-49963331-a486-495f-a065-cbcd2c380941 for instance with vm_state building and task_state spawning.
[ 1057.568180] env[69994]: DEBUG nova.network.neutron [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Successfully updated port: 49963331-a486-495f-a065-cbcd2c380941 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1057.598510] env[69994]: DEBUG nova.objects.instance [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'flavor' on Instance uuid 29071eb9-6334-4c23-acb4-142c12aa448d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.619680] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f84af1ea-e7ad-4056-8594-efcd5391769a tempest-ServersTestMultiNic-89683508 tempest-ServersTestMultiNic-89683508-project-member] Lock "81bae584-e558-4f96-9696-2510fed5a2e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 17.569s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.867832] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242422, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.071168] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-fc31da72-d09e-415e-9866-3e7fc91fec79" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.075021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-fc31da72-d09e-415e-9866-3e7fc91fec79" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.075021] env[69994]: DEBUG nova.network.neutron [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.308944] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d4ab47-7e3f-4221-83dd-a6740eac2a04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.316499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ebfd38-cdd0-4838-8eb8-3a6aca37420a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.350008] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e17e25-b69f-4bc5-9363-4250af6997d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.362677] env[69994]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf5cf81-bc93-4333-831e-39d0a9cb02b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.366473] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242422, 'name': ReconfigVM_Task, 'duration_secs': 0.590132} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.366848] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Reconfigured VM instance instance-0000005b to attach disk [datastore2] f00662a9-92e0-4520-9ced-3cfd6e83628b/f00662a9-92e0-4520-9ced-3cfd6e83628b.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.367824] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcbb98ea-dc94-40f6-8cb7-b29eaaeb78d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.378492] env[69994]: DEBUG nova.compute.provider_tree [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.384555] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1058.384555] env[69994]: value = "task-3242425" [ 1058.384555] env[69994]: _type = "Task" [ 1058.384555] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.392233] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242425, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.607370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-54ca85fe-bbb4-4930-96e4-bc43d89b1c11 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 4.261s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.615517] env[69994]: DEBUG nova.network.neutron [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1058.841472] env[69994]: DEBUG nova.network.neutron [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Updating instance_info_cache with network_info: [{"id": "49963331-a486-495f-a065-cbcd2c380941", "address": "fa:16:3e:f4:c2:39", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49963331-a4", "ovs_interfaceid": "49963331-a486-495f-a065-cbcd2c380941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.902468] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242425, 'name': Rename_Task, 'duration_secs': 0.13851} completed successfully.
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.902762] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1058.903063] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e836f05-fd8d-449c-9f7a-db41629579cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.907704] env[69994]: ERROR nova.scheduler.client.report [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [req-14e33ac0-619a-4451-9818-240a8b90998f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-14e33ac0-619a-4451-9818-240a8b90998f"}]} [ 1058.911295] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1058.911295] env[69994]: value = "task-3242426" [ 1058.911295] env[69994]: _type = "Task" [ 1058.911295] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.919251] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242426, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.926818] env[69994]: DEBUG nova.scheduler.client.report [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1058.944860] env[69994]: DEBUG nova.scheduler.client.report [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1058.945102] env[69994]: DEBUG nova.compute.provider_tree [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1058.958727] env[69994]: DEBUG nova.scheduler.client.report [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1058.979349] env[69994]: DEBUG nova.scheduler.client.report [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1059.237984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0931d1e-f4ae-4ff3-a85e-d93bc7772d51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.247817] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28699ed-1bea-4d6d-9003-c3fba03e982e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.278618] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-be6d04c7-1848-4446-b417-403aef849d52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.286407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecfe161-cf9c-4443-bf61-d9a7bc71dc97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.302757] env[69994]: DEBUG nova.compute.provider_tree [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.344368] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-fc31da72-d09e-415e-9866-3e7fc91fec79" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.344715] env[69994]: DEBUG nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Instance network_info: |[{"id": "49963331-a486-495f-a065-cbcd2c380941", "address": "fa:16:3e:f4:c2:39", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49963331-a4", "ovs_interfaceid": "49963331-a486-495f-a065-cbcd2c380941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1059.345419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:c2:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49963331-a486-495f-a065-cbcd2c380941', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.352826] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.353050] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1059.353281] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2adadc21-955a-49cd-9601-83d10d9b0b19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.373296] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.373296] env[69994]: value = "task-3242427" [ 1059.373296] env[69994]: _type = "Task" [ 1059.373296] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.380926] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242427, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.426555] env[69994]: DEBUG oslo_vmware.api [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242426, 'name': PowerOnVM_Task, 'duration_secs': 0.437521} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.426555] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1059.426555] env[69994]: INFO nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Took 8.24 seconds to spawn the instance on the hypervisor. 
[ 1059.426555] env[69994]: DEBUG nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1059.426555] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950e3398-7b63-4775-9113-8e45102dafdc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.620588] env[69994]: DEBUG nova.compute.manager [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Received event network-changed-49963331-a486-495f-a065-cbcd2c380941 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.620819] env[69994]: DEBUG nova.compute.manager [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Refreshing instance network info cache due to event network-changed-49963331-a486-495f-a065-cbcd2c380941. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1059.621054] env[69994]: DEBUG oslo_concurrency.lockutils [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] Acquiring lock "refresh_cache-fc31da72-d09e-415e-9866-3e7fc91fec79" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.621219] env[69994]: DEBUG oslo_concurrency.lockutils [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] Acquired lock "refresh_cache-fc31da72-d09e-415e-9866-3e7fc91fec79" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.621369] env[69994]: DEBUG nova.network.neutron [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Refreshing network info cache for port 49963331-a486-495f-a065-cbcd2c380941 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1059.844079] env[69994]: DEBUG nova.scheduler.client.report [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1059.844368] env[69994]: DEBUG nova.compute.provider_tree [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 126 to 127 during operation: update_inventory {{(pid=69994) _update_generation 
/opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1059.844554] env[69994]: DEBUG nova.compute.provider_tree [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1059.883150] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242427, 'name': CreateVM_Task, 'duration_secs': 0.319886} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.883332] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1059.883994] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.884176] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.884528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1059.884772] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a689abe-f6cd-48bd-b268-141d4ba52218 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.889200] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1059.889200] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527fd59a-46b1-ff82-00e3-bed87bcdb9fb" [ 1059.889200] env[69994]: _type = "Task" [ 1059.889200] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.896854] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527fd59a-46b1-ff82-00e3-bed87bcdb9fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.907938] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.908207] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.908513] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "29071eb9-6334-4c23-acb4-142c12aa448d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.908709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.908882] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.914430] env[69994]: INFO nova.compute.manager [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Terminating instance [ 1059.947096] env[69994]: INFO nova.compute.manager [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Took 31.45 seconds to build instance. 
[ 1060.340922] env[69994]: DEBUG nova.network.neutron [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Updated VIF entry in instance network info cache for port 49963331-a486-495f-a065-cbcd2c380941. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1060.340922] env[69994]: DEBUG nova.network.neutron [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Updating instance_info_cache with network_info: [{"id": "49963331-a486-495f-a065-cbcd2c380941", "address": "fa:16:3e:f4:c2:39", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49963331-a4", "ovs_interfaceid": "49963331-a486-495f-a065-cbcd2c380941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.353019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.262s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.353019] env[69994]: DEBUG nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1060.354109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.526s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.354484] env[69994]: DEBUG nova.objects.instance [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lazy-loading 'resources' on Instance uuid 071151e4-a3ee-4a89-8b83-19bef3fb7d3e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.405366] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527fd59a-46b1-ff82-00e3-bed87bcdb9fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.405891] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.406164] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.406415] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.406564] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.406747] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.407027] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f4b83cd-42bf-4bc2-b0a6-250664aaa8a1 {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.415977] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.416180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1060.418458] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39b06541-27e7-4400-9a0a-f08436cbdbec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.421576] env[69994]: DEBUG nova.compute.manager [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1060.421775] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1060.422567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6402fc94-cb68-46c1-aeb5-62e86619bd65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.426296] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a916bfe6-a6ab-45d4-b424-cf5a30422707 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.433692] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1060.433692] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ada47e-ddad-93a9-7c80-8d5d6ec560ca" [ 1060.433692] env[69994]: _type = "Task" [ 1060.433692] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.437415] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.437415] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a3deea8f-9d7c-4926-af0b-6fd3bd162b07 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1060.440342] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2169bdc6-1a1b-433b-b253-ad802f07948c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.441823] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-cf0ed415-3dc3-4143-aee5-69710deb8824 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.450509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a54b3b95-a635-4884-80e9-320a1afccbd2 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.964s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.451223] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ada47e-ddad-93a9-7c80-8d5d6ec560ca, 'name': SearchDatastore_Task, 'duration_secs': 0.009821} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.459445] env[69994]: DEBUG oslo_vmware.api [None req-a3deea8f-9d7c-4926-af0b-6fd3bd162b07 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1060.459445] env[69994]: value = "task-3242430" [ 1060.459445] env[69994]: _type = "Task" [ 1060.459445] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.459763] env[69994]: DEBUG oslo_vmware.api [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1060.459763] env[69994]: value = "task-3242429" [ 1060.459763] env[69994]: _type = "Task" [ 1060.459763] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.460081] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a52dea3-8d75-4f99-a6d1-40a5de637096 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.474928] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1060.474928] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5291e36d-b677-8c16-aa80-72c02cf17b20" [ 1060.474928] env[69994]: _type = "Task" [ 1060.474928] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.481840] env[69994]: DEBUG oslo_vmware.api [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242429, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.481840] env[69994]: DEBUG oslo_vmware.api [None req-a3deea8f-9d7c-4926-af0b-6fd3bd162b07 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242430, 'name': SuspendVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.490915] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5291e36d-b677-8c16-aa80-72c02cf17b20, 'name': SearchDatastore_Task, 'duration_secs': 0.011163} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.491201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.491486] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1060.491759] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fcc0ecc-8500-4267-9876-5d9be1b17afb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.498161] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1060.498161] env[69994]: value = "task-3242431" [ 1060.498161] env[69994]: _type = "Task" [ 1060.498161] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.507012] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242431, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.843550] env[69994]: DEBUG oslo_concurrency.lockutils [req-6350d5aa-19b4-4977-b66a-2e0583abcf0a req-e0e47635-b46e-4403-bccf-c6fdca4dd917 service nova] Releasing lock "refresh_cache-fc31da72-d09e-415e-9866-3e7fc91fec79" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.858514] env[69994]: DEBUG nova.compute.utils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1060.864198] env[69994]: DEBUG nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1060.864437] env[69994]: DEBUG nova.network.neutron [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1060.942852] env[69994]: DEBUG nova.policy [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e524b2b6a23748a4abfa3402e1da24ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54c17fed52c145b08f369e78c279a4a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1060.983338] env[69994]: DEBUG oslo_vmware.api [None req-a3deea8f-9d7c-4926-af0b-6fd3bd162b07 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242430, 'name': SuspendVM_Task} progress is 62%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.987433] env[69994]: DEBUG oslo_vmware.api [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242429, 'name': PowerOffVM_Task, 'duration_secs': 0.227228} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.990608] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.990867] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1060.991359] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bad31751-acac-4116-a74a-9713488a4ef4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.014092] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242431, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.067015] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1061.067283] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1061.067883] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Deleting the datastore file [datastore1] 29071eb9-6334-4c23-acb4-142c12aa448d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1061.067883] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-130204d7-3e84-4e0d-975c-90b89ee8b631 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.080828] env[69994]: DEBUG oslo_vmware.api [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for the task: (returnval){ [ 1061.080828] env[69994]: value = "task-3242433" [ 1061.080828] env[69994]: _type = "Task" [ 1061.080828] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.091790] env[69994]: DEBUG oslo_vmware.api [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242433, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.153302] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c1c7be-1c68-41f7-8f94-e5b61a9936a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.161176] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8177f573-045b-4cb5-933e-ed9534fbaf58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.196371] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2419a1db-9602-4cf0-b783-5fef12adf13a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.204414] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624fe90f-93b1-490d-b8d8-db897f93fa74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.219147] env[69994]: DEBUG nova.compute.provider_tree [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.284929] env[69994]: DEBUG nova.network.neutron [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Successfully created port: 5068e2e3-d196-4aea-8f08-ae8d9c0239cc {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1061.365234] env[69994]: DEBUG nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1061.474317] env[69994]: DEBUG oslo_vmware.api [None req-a3deea8f-9d7c-4926-af0b-6fd3bd162b07 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242430, 'name': SuspendVM_Task, 'duration_secs': 0.775136} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.474629] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-a3deea8f-9d7c-4926-af0b-6fd3bd162b07 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1061.474813] env[69994]: DEBUG nova.compute.manager [None req-a3deea8f-9d7c-4926-af0b-6fd3bd162b07 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1061.475630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b764c36-fd30-4242-8c1f-b228ab2b8d9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.519853] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61958} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.520141] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1061.522895] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1061.522895] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f7c227d-2ff6-4b6a-ba28-a67e6ff0f1ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.527568] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1061.527568] env[69994]: value = "task-3242434" [ 1061.527568] env[69994]: _type = "Task" [ 1061.527568] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.538452] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242434, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.598872] env[69994]: DEBUG oslo_vmware.api [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Task: {'id': task-3242433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200817} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.599136] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.599364] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1061.599577] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.599785] env[69994]: INFO nova.compute.manager [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1061.600108] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.600343] env[69994]: DEBUG nova.compute.manager [-] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1061.600466] env[69994]: DEBUG nova.network.neutron [-] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1061.660037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.660293] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.727675] env[69994]: DEBUG nova.scheduler.client.report [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.731928] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1061.732165] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647994', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'name': 'volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e17fcc84-7c86-41b6-88ec-8a35619534b6', 'attached_at': '', 'detached_at': '', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'serial': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1061.733739] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782e4532-9bf6-4f2b-b95b-1ed82e471688 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.763175] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360ebc8f-02c8-4400-bd0e-0c9ca532912b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.789836] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc/volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1061.790441] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ab263a4-a582-44a6-a1f3-070c6e272165 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.812041] env[69994]: DEBUG oslo_vmware.api [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1061.812041] env[69994]: value = "task-3242435" [ 1061.812041] env[69994]: _type = "Task" [ 1061.812041] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.821523] env[69994]: DEBUG oslo_vmware.api [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242435, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.013732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ef410b09-8686-409e-8391-d50cd0e0df04" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.014408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.038131] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242434, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089272} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.038483] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.039286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5bec240-7ee4-40c5-baf9-dd8fd301730b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.061580] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.062135] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55806649-892b-45e0-ad20-12741ce81ce3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.084684] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1062.084684] env[69994]: value = "task-3242436" [ 1062.084684] env[69994]: _type = "Task" [ 1062.084684] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.092556] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242436, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.165570] env[69994]: DEBUG nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1062.170082] env[69994]: DEBUG nova.compute.manager [req-bcc3d063-6991-4c4e-96fc-e934aaeeb9be req-4c548b79-c070-486c-b718-e839414942c5 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Received event network-vif-deleted-678dab49-879d-4408-9488-fef42c017965 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1062.170255] env[69994]: INFO nova.compute.manager [req-bcc3d063-6991-4c4e-96fc-e934aaeeb9be req-4c548b79-c070-486c-b718-e839414942c5 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Neutron deleted interface 678dab49-879d-4408-9488-fef42c017965; detaching it from the instance and deleting it from the info cache [ 1062.170431] env[69994]: DEBUG nova.network.neutron [req-bcc3d063-6991-4c4e-96fc-e934aaeeb9be req-4c548b79-c070-486c-b718-e839414942c5 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.237785] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.240908] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.118s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.265576] env[69994]: INFO nova.scheduler.client.report [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleted allocations for instance 071151e4-a3ee-4a89-8b83-19bef3fb7d3e [ 1062.322777] env[69994]: DEBUG oslo_vmware.api [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242435, 'name': ReconfigVM_Task, 'duration_secs': 0.432568} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.323105] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfigured VM instance instance-0000004f to attach disk [datastore2] volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc/volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1062.327691] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8260699-31e2-4414-8a01-ed81d2db0495 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.343073] env[69994]: DEBUG oslo_vmware.api [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1062.343073] env[69994]: value = "task-3242437" [ 1062.343073] env[69994]: _type = "Task" [ 1062.343073] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.351073] env[69994]: DEBUG oslo_vmware.api [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242437, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.375017] env[69994]: DEBUG nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1062.407716] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1062.408012] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.408194] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1062.408427] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.408607] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1062.408819] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1062.409183] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1062.409403] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1062.409590] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea 
tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1062.409760] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1062.409938] env[69994]: DEBUG nova.virt.hardware [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1062.411011] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6465bc-9978-4568-b7e7-b0e6b7275f12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.418888] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69fa0f04-7ca1-40fa-9fce-6e33339c5203 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.518789] env[69994]: INFO nova.compute.manager [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Detaching volume aca9e875-f8b1-40fd-8a48-9f448b62ea3b [ 1062.559857] env[69994]: INFO nova.virt.block_device [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Attempting to driver detach volume aca9e875-f8b1-40fd-8a48-9f448b62ea3b from mountpoint /dev/sdb [ 1062.560163] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1062.560405] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647972', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'name': 'volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef410b09-8686-409e-8391-d50cd0e0df04', 'attached_at': '', 'detached_at': '', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'serial': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1062.561405] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63242f84-34d7-441c-8240-2ed9155cd7ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.584529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9c9cd2-71c8-4d81-9e67-ea10a391ee68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.596531] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242436, 'name': ReconfigVM_Task, 'duration_secs': 0.329068} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.598240] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Reconfigured VM instance instance-0000005c to attach disk [datastore2] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1062.598941] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee92b2f1-a9c6-4209-a093-52f5aa46d11d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.600827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc018de-6d28-4107-a036-91b3552d18f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.623210] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ef736b-3626-4031-976a-9e420ffb7101 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.625853] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1062.625853] env[69994]: value = "task-3242438" [ 1062.625853] env[69994]: _type = "Task" [ 1062.625853] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.639577] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] The volume has not been displaced from its original location: [datastore1] volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b/volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1062.644905] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1062.645628] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35cd9f9b-0049-4375-a080-129fc07443f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.661145] env[69994]: DEBUG nova.network.neutron [-] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.662452] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242438, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.668675] env[69994]: DEBUG oslo_vmware.api [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1062.668675] env[69994]: value = "task-3242439" [ 1062.668675] env[69994]: _type = "Task" [ 1062.668675] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.677907] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fac899e2-78d8-4a46-a3c5-639d75ec8710 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.683715] env[69994]: DEBUG oslo_vmware.api [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242439, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.690952] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde83fd0-38b6-409d-841f-351c49069e6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.703052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.727621] env[69994]: DEBUG nova.compute.manager [req-bcc3d063-6991-4c4e-96fc-e934aaeeb9be req-4c548b79-c070-486c-b718-e839414942c5 service nova] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Detach interface failed, port_id=678dab49-879d-4408-9488-fef42c017965, reason: Instance 29071eb9-6334-4c23-acb4-142c12aa448d could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1062.750198] env[69994]: INFO nova.compute.claims [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1062.773073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-240441be-35b5-4bfd-b6fc-18945eedf5a2 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "071151e4-a3ee-4a89-8b83-19bef3fb7d3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.898s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.854119] env[69994]: DEBUG oslo_vmware.api [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242437, 'name': ReconfigVM_Task, 'duration_secs': 0.146965} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.854487] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647994', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'name': 'volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e17fcc84-7c86-41b6-88ec-8a35619534b6', 'attached_at': '', 'detached_at': '', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'serial': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1063.128896] env[69994]: DEBUG nova.compute.manager [req-7087b904-38ba-420b-933f-a912dc33011b req-381963a0-bb0b-4f35-be5f-a99c89a2372f service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Received event network-vif-plugged-5068e2e3-d196-4aea-8f08-ae8d9c0239cc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.128896] env[69994]: DEBUG oslo_concurrency.lockutils [req-7087b904-38ba-420b-933f-a912dc33011b req-381963a0-bb0b-4f35-be5f-a99c89a2372f service nova] Acquiring lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.128896] env[69994]: DEBUG oslo_concurrency.lockutils [req-7087b904-38ba-420b-933f-a912dc33011b req-381963a0-bb0b-4f35-be5f-a99c89a2372f service nova] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.128896] env[69994]: DEBUG oslo_concurrency.lockutils [req-7087b904-38ba-420b-933f-a912dc33011b req-381963a0-bb0b-4f35-be5f-a99c89a2372f service nova] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.128896] env[69994]: DEBUG nova.compute.manager [req-7087b904-38ba-420b-933f-a912dc33011b req-381963a0-bb0b-4f35-be5f-a99c89a2372f service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] No waiting events found dispatching network-vif-plugged-5068e2e3-d196-4aea-8f08-ae8d9c0239cc {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.128896] env[69994]: WARNING nova.compute.manager [req-7087b904-38ba-420b-933f-a912dc33011b req-381963a0-bb0b-4f35-be5f-a99c89a2372f service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Received unexpected event network-vif-plugged-5068e2e3-d196-4aea-8f08-ae8d9c0239cc for instance with vm_state building and task_state spawning. [ 1063.138066] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242438, 'name': Rename_Task, 'duration_secs': 0.181225} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.138322] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.138559] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25c403ce-da8d-4e26-b6dd-1b26365dd713 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.144779] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1063.144779] env[69994]: value = "task-3242440" [ 1063.144779] env[69994]: _type = "Task" [ 1063.144779] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.152557] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242440, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.165213] env[69994]: INFO nova.compute.manager [-] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Took 1.56 seconds to deallocate network for instance. [ 1063.177869] env[69994]: DEBUG oslo_vmware.api [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242439, 'name': ReconfigVM_Task, 'duration_secs': 0.250667} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.178208] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1063.184351] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9476305-1983-4450-a5a2-2229bbce5f35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.200262] env[69994]: DEBUG oslo_vmware.api [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1063.200262] env[69994]: value = "task-3242441" [ 1063.200262] env[69994]: _type = "Task" [ 1063.200262] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.208863] env[69994]: DEBUG oslo_vmware.api [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242441, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.258022] env[69994]: INFO nova.compute.resource_tracker [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating resource usage from migration fdd10aa8-31c5-4ca6-937d-23c4b6d5f3f4 [ 1063.478825] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a98300-6028-4d06-b7c5-c2afd7a0f1d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.486483] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314489db-2c55-469d-9e08-7db816594806 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.519177] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835834d8-e330-41a8-aa43-0dda31d6241d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.530317] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c30f8a-c9de-44bb-8ad5-3ce3c88ae82c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.545008] env[69994]: DEBUG nova.compute.provider_tree [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1063.589405] env[69994]: DEBUG nova.network.neutron [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Successfully updated port: 5068e2e3-d196-4aea-8f08-ae8d9c0239cc {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1063.608074] env[69994]: DEBUG nova.compute.manager [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Received event network-changed-5068e2e3-d196-4aea-8f08-ae8d9c0239cc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.608177] env[69994]: DEBUG nova.compute.manager [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Refreshing instance network info cache due to event network-changed-5068e2e3-d196-4aea-8f08-ae8d9c0239cc. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1063.608392] env[69994]: DEBUG oslo_concurrency.lockutils [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] Acquiring lock "refresh_cache-0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.608538] env[69994]: DEBUG oslo_concurrency.lockutils [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] Acquired lock "refresh_cache-0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.608730] env[69994]: DEBUG nova.network.neutron [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Refreshing network info cache for port 5068e2e3-d196-4aea-8f08-ae8d9c0239cc {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.655837] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242440, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.670381] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.714312] env[69994]: DEBUG oslo_vmware.api [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242441, 'name': ReconfigVM_Task, 'duration_secs': 0.13412} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.714866] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647972', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'name': 'volume-aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ef410b09-8686-409e-8391-d50cd0e0df04', 'attached_at': '', 'detached_at': '', 'volume_id': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b', 'serial': 'aca9e875-f8b1-40fd-8a48-9f448b62ea3b'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1063.900942] env[69994]: DEBUG nova.objects.instance [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'flavor' on Instance uuid e17fcc84-7c86-41b6-88ec-8a35619534b6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.969876] env[69994]: DEBUG nova.compute.manager [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1063.970879] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130d1a3a-fcd4-45ca-ad18-3ce96d91e5eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.026682] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.027042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.027193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.027370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 
tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.027801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.029602] env[69994]: INFO nova.compute.manager [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Terminating instance [ 1064.065491] env[69994]: ERROR nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [req-6300e8ce-e7ef-4c56-98e9-548d1d09bc9c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6300e8ce-e7ef-4c56-98e9-548d1d09bc9c"}]} [ 1064.082134] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1064.092155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "refresh_cache-0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.106238] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1064.106426] env[69994]: DEBUG nova.compute.provider_tree [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1064.121229] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1064.123152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "799bf051-86b4-45bd-b9bf-df767074dac8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.123152] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 
tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "799bf051-86b4-45bd-b9bf-df767074dac8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.123333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "799bf051-86b4-45bd-b9bf-df767074dac8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.123506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "799bf051-86b4-45bd-b9bf-df767074dac8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.123666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "799bf051-86b4-45bd-b9bf-df767074dac8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.127169] env[69994]: INFO nova.compute.manager [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Terminating instance [ 1064.149018] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1064.150717] env[69994]: DEBUG nova.network.neutron [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1064.162152] env[69994]: DEBUG oslo_vmware.api [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242440, 'name': PowerOnVM_Task, 'duration_secs': 0.851512} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.162152] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1064.162152] env[69994]: INFO nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Took 6.94 seconds to spawn the instance on the hypervisor. [ 1064.162930] env[69994]: DEBUG nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.163762] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2c50b8-beb6-4433-81f1-4d048c63061f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.237607] env[69994]: DEBUG nova.network.neutron [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.276499] env[69994]: DEBUG nova.objects.instance [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lazy-loading 'flavor' on Instance uuid ef410b09-8686-409e-8391-d50cd0e0df04 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.394615] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f1d452-6829-48cd-b19b-fc32df8c9935 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.402141] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aacaa034-3047-4125-b3fd-7b141a101402 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.407900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b1cfe9a-1154-49a3-9bb7-2b35010110c1 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.287s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.437877] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dbb01d-5391-4a86-a71c-e3b6ccdb33c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.445932] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fc634e-11ac-40a3-94ac-0f5ff13f5d31 {{(pid=69994) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.461670] env[69994]: DEBUG nova.compute.provider_tree [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1064.483817] env[69994]: INFO nova.compute.manager [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] instance snapshotting [ 1064.483817] env[69994]: WARNING nova.compute.manager [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1064.486107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f51e907-a383-4a62-8e6f-daabe52fa6d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.505428] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8c5b0a-d01b-4d78-bc84-84f437421114 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.533248] env[69994]: DEBUG nova.compute.manager [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.533478] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.534494] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb668fa-d910-4bb8-958e-3fd1e958bbb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.542086] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.542287] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8edc9f50-726d-4222-89fa-e8a93465eed6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.548934] env[69994]: DEBUG oslo_vmware.api [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1064.548934] env[69994]: value = "task-3242442" [ 1064.548934] env[69994]: _type = "Task" [ 1064.548934] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.557362] env[69994]: DEBUG oslo_vmware.api [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.632106] env[69994]: DEBUG nova.compute.manager [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.632437] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.633402] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dec50c9-2ec6-4f07-8f9a-04795e06dc77 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.641844] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.642126] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bb26b57-6c27-4960-a125-4c2b79ae78dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.649108] env[69994]: DEBUG oslo_vmware.api [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1064.649108] env[69994]: value = "task-3242443" [ 1064.649108] env[69994]: _type = "Task" [ 1064.649108] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.657430] env[69994]: DEBUG oslo_vmware.api [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.689049] env[69994]: INFO nova.compute.manager [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Took 29.02 seconds to build instance. 
[ 1064.741383] env[69994]: DEBUG oslo_concurrency.lockutils [req-1a9e090f-3a94-4db4-a943-4afbbdd253b2 req-992f5694-b535-4e07-b817-a257014c6dd6 service nova] Releasing lock "refresh_cache-0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.741841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquired lock "refresh_cache-0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.742042] env[69994]: DEBUG nova.network.neutron [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.983873] env[69994]: ERROR nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [req-bdf909e8-dc67-4077-9390-aa725e4fe9cb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bdf909e8-dc67-4077-9390-aa725e4fe9cb"}]} [ 1065.000791] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1065.013731] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1065.013891] env[69994]: DEBUG nova.compute.provider_tree [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1065.016495] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1065.017225] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-94d17e48-4180-4c9c-bf62-3ea449f4819c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.024760] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1065.024760] env[69994]: value = "task-3242444" [ 1065.024760] env[69994]: _type = "Task" [ 1065.024760] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.030183] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1065.035974] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242444, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.047312] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1065.059445] env[69994]: DEBUG oslo_vmware.api [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242442, 'name': PowerOffVM_Task, 'duration_secs': 0.197985} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.059710] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.059924] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.060215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5d00d0b-f41a-4b5d-a153-4f62bb9f22c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.119585] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.119669] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.119798] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleting the datastore file [datastore1] 384889a3-c3d9-4e0e-8d1c-95193cf4343d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.120069] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc3c2225-27fc-48f3-8cb5-195b3cd00078 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.127172] env[69994]: DEBUG oslo_vmware.api [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1065.127172] env[69994]: value = "task-3242446" [ 1065.127172] env[69994]: _type = "Task" [ 1065.127172] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.135175] env[69994]: DEBUG oslo_vmware.api [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242446, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.159691] env[69994]: DEBUG oslo_vmware.api [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242443, 'name': PowerOffVM_Task, 'duration_secs': 0.21929} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.162185] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.162375] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.162805] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e19fc019-82b6-4b4d-9ed9-ac8fc87e3533 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.191569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-12e43106-1353-4ee7-9a67-e2568e362c5d tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.532s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.224698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.224939] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.225167] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleting the datastore file [datastore1] 799bf051-86b4-45bd-b9bf-df767074dac8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.225462] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cf631f4-93f3-464c-894d-efcc1691c419 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.231208] env[69994]: DEBUG oslo_vmware.api [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 
tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for the task: (returnval){ [ 1065.231208] env[69994]: value = "task-3242448" [ 1065.231208] env[69994]: _type = "Task" [ 1065.231208] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.241095] env[69994]: DEBUG oslo_vmware.api [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242448, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.275035] env[69994]: DEBUG nova.network.neutron [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1065.280357] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecbdb8f-e370-4a6b-b2b7-36239577ad5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.283154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-796a5707-896e-4c55-892b-0a564dd18ba5 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.269s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.290093] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9460a9c-3ecf-4583-92d8-ee591a37271a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.324356] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3411ec-d25a-439c-9e63-6e8ebec29584 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.332568] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de86d0c2-29ec-45b0-be55-2c1d2bc183e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.346222] env[69994]: DEBUG nova.compute.provider_tree [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1065.423344] env[69994]: DEBUG nova.network.neutron [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 
tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Updating instance_info_cache with network_info: [{"id": "5068e2e3-d196-4aea-8f08-ae8d9c0239cc", "address": "fa:16:3e:e2:f1:ea", "network": {"id": "d82ff430-67a0-4383-be20-e2c3377827dc", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1161617245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54c17fed52c145b08f369e78c279a4a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5068e2e3-d1", "ovs_interfaceid": "5068e2e3-d196-4aea-8f08-ae8d9c0239cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.472936] env[69994]: DEBUG nova.compute.manager [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1065.492469] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "fc31da72-d09e-415e-9866-3e7fc91fec79" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.492806] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.493058] env[69994]: DEBUG nova.compute.manager [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1065.494088] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ba02a5-ce97-4dde-a292-9fdc0b123208 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.502355] env[69994]: DEBUG nova.compute.manager [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Stopping instance; 
current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1065.503094] env[69994]: DEBUG nova.objects.instance [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'flavor' on Instance uuid fc31da72-d09e-415e-9866-3e7fc91fec79 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.534215] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242444, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.637637] env[69994]: DEBUG oslo_vmware.api [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136498} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.639022] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.639022] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.639022] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.639022] env[69994]: INFO nova.compute.manager [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1065.639022] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.639022] env[69994]: DEBUG nova.compute.manager [-] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.639022] env[69994]: DEBUG nova.network.neutron [-] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.741541] env[69994]: DEBUG oslo_vmware.api [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Task: {'id': task-3242448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125328} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.741900] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.741969] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.742119] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.742297] env[69994]: INFO nova.compute.manager [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1065.742530] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.742844] env[69994]: DEBUG nova.compute.manager [-] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.742844] env[69994]: DEBUG nova.network.neutron [-] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.886593] env[69994]: DEBUG nova.scheduler.client.report [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updated inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with generation 130 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1065.888013] env[69994]: DEBUG nova.compute.provider_tree [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 generation from 130 to 131 during operation: update_inventory {{(pid=69994) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1065.888013] env[69994]: DEBUG nova.compute.provider_tree [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1065.925574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Releasing lock "refresh_cache-0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.925897] env[69994]: DEBUG nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Instance network_info: |[{"id": "5068e2e3-d196-4aea-8f08-ae8d9c0239cc", "address": "fa:16:3e:e2:f1:ea", "network": {"id": "d82ff430-67a0-4383-be20-e2c3377827dc", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1161617245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54c17fed52c145b08f369e78c279a4a5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5068e2e3-d1", "ovs_interfaceid": "5068e2e3-d196-4aea-8f08-ae8d9c0239cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1065.926322] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:f1:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5068e2e3-d196-4aea-8f08-ae8d9c0239cc', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1065.934445] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Creating folder: Project (54c17fed52c145b08f369e78c279a4a5). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1065.935303] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b820662-1b8e-46d9-87b2-3829d96c4ff3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.946602] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Created folder: Project (54c17fed52c145b08f369e78c279a4a5) in parent group-v647729. [ 1065.946796] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Creating folder: Instances. Parent ref: group-v647996. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1065.947040] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88a80b5c-b793-40d9-898c-1edeeb2fe4e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.958549] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Created folder: Instances in parent group-v647996. 
[ 1065.958818] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.961205] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1065.961839] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bdba4af-d044-4ae5-8b59-da51add8302e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.986957] env[69994]: DEBUG nova.compute.manager [req-8d2e8a82-d72b-44b2-ad66-601f42a583e7 req-6c23785e-4e61-49ef-a94d-8bb832e02a3c service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Received event network-vif-deleted-83645e16-3809-4855-9874-a71858e590d0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1065.987919] env[69994]: INFO nova.compute.manager [req-8d2e8a82-d72b-44b2-ad66-601f42a583e7 req-6c23785e-4e61-49ef-a94d-8bb832e02a3c service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Neutron deleted interface 83645e16-3809-4855-9874-a71858e590d0; detaching it from the instance and deleting it from the info cache [ 1065.987919] env[69994]: DEBUG nova.network.neutron [req-8d2e8a82-d72b-44b2-ad66-601f42a583e7 req-6c23785e-4e61-49ef-a94d-8bb832e02a3c service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.990903] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1065.990903] env[69994]: value = "task-3242451" [ 1065.990903] env[69994]: _type = "Task" [ 1065.990903] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.996076] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.002118] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242451, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.035956] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242444, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.038729] env[69994]: DEBUG nova.compute.manager [req-82ba2c68-c458-42bd-92d2-a0f65289f009 req-7b1b0128-d3e1-4975-841f-2ff2ea63975c service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Received event network-vif-deleted-79ff1faf-be56-422b-9591-03f17c055f66 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.038928] env[69994]: INFO nova.compute.manager [req-82ba2c68-c458-42bd-92d2-a0f65289f009 req-7b1b0128-d3e1-4975-841f-2ff2ea63975c service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Neutron deleted interface 79ff1faf-be56-422b-9591-03f17c055f66; detaching it from the instance and deleting it from the info cache [ 1066.039199] env[69994]: DEBUG nova.network.neutron [req-82ba2c68-c458-42bd-92d2-a0f65289f009 req-7b1b0128-d3e1-4975-841f-2ff2ea63975c service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.395662] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 4.155s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.395787] env[69994]: INFO nova.compute.manager [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Migrating [ 1066.402299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.734s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.403703] env[69994]: INFO nova.compute.claims [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1066.423291] env[69994]: DEBUG nova.network.neutron [-] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.463080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ef410b09-8686-409e-8391-d50cd0e0df04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.463348] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.463557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ef410b09-8686-409e-8391-d50cd0e0df04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.463741] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.463908] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.466484] env[69994]: INFO nova.compute.manager [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Terminating instance [ 1066.483350] env[69994]: DEBUG nova.network.neutron [-] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.492856] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05a85007-5a1d-4f13-a057-487ce5b867a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.503712] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242451, 'name': CreateVM_Task, 'duration_secs': 0.392516} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.504706] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1066.505425] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.505589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.505903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1066.508792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0a6dbf-15ac-4099-91d5-365c5dc3258b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.519076] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f6da61f-1149-4b34-82d4-8ea58908ad23 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.521267] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.521500] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a49e39b-2280-4ce6-b1c2-43affb4be596 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.526088] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1066.526088] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52878a63-7ae4-12d6-176b-6b9b0bf02669" [ 1066.526088] env[69994]: _type = "Task" [ 1066.526088] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.548254] env[69994]: DEBUG nova.compute.manager [req-8d2e8a82-d72b-44b2-ad66-601f42a583e7 req-6c23785e-4e61-49ef-a94d-8bb832e02a3c service nova] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Detach interface failed, port_id=83645e16-3809-4855-9874-a71858e590d0, reason: Instance 384889a3-c3d9-4e0e-8d1c-95193cf4343d could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1066.548764] env[69994]: DEBUG oslo_vmware.api [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1066.548764] env[69994]: value = "task-3242452" [ 1066.548764] env[69994]: _type = "Task" [ 1066.548764] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.549627] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0823712d-8670-4025-a569-9f116a117017 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.560114] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242444, 'name': CreateSnapshot_Task, 'duration_secs': 1.055874} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.560328] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52878a63-7ae4-12d6-176b-6b9b0bf02669, 'name': SearchDatastore_Task, 'duration_secs': 0.010629} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.560900] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1066.561210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.561431] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1066.561660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.561803] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.561979] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1066.563038] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbf45b3-fe07-4829-93fe-1cb0d0d3a478 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.565178] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-280ee06d-e194-4ac6-a68d-0b1873dbbe26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.570360] env[69994]: DEBUG oslo_vmware.api [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242452, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.573702] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585e7fd1-ff7e-4d69-9f4c-40c13b93e2b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.592015] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1066.592235] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1066.593511] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0dce1c1-df4f-4a03-b04c-ec7e28cf3dcd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.598327] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1066.598327] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523cdc8b-ec55-a84e-8565-b8a76c1186df" [ 1066.598327] env[69994]: _type = "Task" [ 1066.598327] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.616273] env[69994]: DEBUG nova.compute.manager [req-82ba2c68-c458-42bd-92d2-a0f65289f009 req-7b1b0128-d3e1-4975-841f-2ff2ea63975c service nova] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Detach interface failed, port_id=79ff1faf-be56-422b-9591-03f17c055f66, reason: Instance 799bf051-86b4-45bd-b9bf-df767074dac8 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1066.616760] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523cdc8b-ec55-a84e-8565-b8a76c1186df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.917907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.918085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.918262] env[69994]: DEBUG nova.network.neutron [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.925904] env[69994]: INFO nova.compute.manager [-] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Took 1.29 seconds to deallocate network for instance. [ 1066.970067] env[69994]: DEBUG nova.compute.manager [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1066.970271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.971833] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e802ea-95cd-4140-9ec8-2ce646f43bd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.979361] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.979609] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49f5dbf2-4ccb-4cfe-817d-bd32947b6316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.985244] env[69994]: INFO nova.compute.manager [-] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Took 1.24 seconds to deallocate network for instance. 
[ 1066.987473] env[69994]: DEBUG oslo_vmware.api [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1066.987473] env[69994]: value = "task-3242453" [ 1066.987473] env[69994]: _type = "Task" [ 1066.987473] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.997867] env[69994]: DEBUG oslo_vmware.api [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242453, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.061910] env[69994]: DEBUG oslo_vmware.api [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242452, 'name': PowerOffVM_Task, 'duration_secs': 0.278022} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.061910] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.061910] env[69994]: DEBUG nova.compute.manager [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.062225] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea723768-0688-49bf-9b17-d923df7280cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.102077] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1067.102733] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9515b462-97b9-4661-9eda-ade367df103d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.115834] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523cdc8b-ec55-a84e-8565-b8a76c1186df, 'name': SearchDatastore_Task, 'duration_secs': 0.014075} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.117688] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1067.117688] env[69994]: value = "task-3242454" [ 1067.117688] env[69994]: _type = "Task" [ 1067.117688] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.117883] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-665335d0-5e13-4ae2-83ca-9ecf49c7f19c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.128085] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242454, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.129395] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1067.129395] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e3c77f-0f15-0cec-b7ca-cda31ee8d753" [ 1067.129395] env[69994]: _type = "Task" [ 1067.129395] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.138410] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e3c77f-0f15-0cec-b7ca-cda31ee8d753, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.432781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.494845] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.502172] env[69994]: DEBUG oslo_vmware.api [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242453, 'name': PowerOffVM_Task, 'duration_secs': 0.231333} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.505317] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.505556] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1067.506089] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1985ff33-b510-4ecd-97f2-a5cceba55aab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.573347] env[69994]: DEBUG oslo_concurrency.lockutils [None req-a2cd9067-1a8f-4e09-8052-16ca32293001 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.080s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.584361] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1067.584632] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1067.584784] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleting the datastore file [datastore2] ef410b09-8686-409e-8391-d50cd0e0df04 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1067.587651] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb86c8ac-63bd-4cba-aa95-ae9309083f8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.596973] env[69994]: DEBUG oslo_vmware.api [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1067.596973] env[69994]: value = "task-3242456" [ 1067.596973] env[69994]: _type = "Task" [ 1067.596973] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.605878] env[69994]: DEBUG oslo_vmware.api [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.629426] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242454, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.639242] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e3c77f-0f15-0cec-b7ca-cda31ee8d753, 'name': SearchDatastore_Task, 'duration_secs': 0.016532} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.639528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.639982] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c/0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1067.640103] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e62b451-c14e-49f2-b183-02de112497da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.649209] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1067.649209] env[69994]: value = "task-3242457" [ 1067.649209] env[69994]: _type = "Task" [ 1067.649209] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.658072] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242457, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.709199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a29ef6a-7bd0-4b6b-b9ad-e2f2bcdeb389 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.717361] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebd0324-5b3b-4d6b-b46d-3d75cdaadca0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.751263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8b3f6e-f7ef-4401-b12a-c82029687320 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.758426] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90181d0-571e-4fe4-865e-ba7a901d331e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.774502] env[69994]: DEBUG nova.compute.provider_tree [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.793298] env[69994]: DEBUG nova.network.neutron [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance_info_cache with network_info: [{"id": "92378003-993a-43f2-8823-55a4b83acdef", "address": "fa:16:3e:a4:fe:80", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92378003-99", "ovs_interfaceid": "92378003-993a-43f2-8823-55a4b83acdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.070701] env[69994]: INFO nova.compute.manager [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Rebuilding instance [ 1068.112779] env[69994]: DEBUG oslo_vmware.api [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb 
tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171609} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.115675] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.115876] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.116083] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.116275] env[69994]: INFO nova.compute.manager [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1068.116519] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.118561] env[69994]: DEBUG nova.compute.manager [-] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1068.118672] env[69994]: DEBUG nova.network.neutron [-] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1068.120370] env[69994]: DEBUG nova.compute.manager [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.121212] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac129273-2b55-4604-aff6-bbd5c7492b47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.135717] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242454, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.159500] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242457, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.279100] env[69994]: DEBUG nova.scheduler.client.report [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.295789] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.633738] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242454, 'name': CloneVM_Task, 'duration_secs': 1.232364} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.634056] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Created linked-clone VM from snapshot [ 1068.635020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d2812a-32c6-4379-a61e-a88e62e72517 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.644016] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Uploading image a9ff1b20-7b93-4d2f-b2ea-ac788dd27141 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1068.659203] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242457, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.673201] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1068.673201] env[69994]: value = "vm-648000" [ 1068.673201] env[69994]: _type = "VirtualMachine" [ 1068.673201] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1068.673464] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-866e0f11-4712-4f34-8619-e0a3b2e079a0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.681577] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease: (returnval){ [ 1068.681577] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f578eb-e0e6-92ab-a869-d67967fb01d9" [ 1068.681577] env[69994]: _type = "HttpNfcLease" [ 1068.681577] env[69994]: } obtained for exporting VM: (result){ [ 1068.681577] env[69994]: value = "vm-648000" [ 1068.681577] env[69994]: _type = "VirtualMachine" [ 1068.681577] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1068.681883] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the lease: (returnval){ [ 1068.681883] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f578eb-e0e6-92ab-a869-d67967fb01d9" [ 1068.681883] env[69994]: _type = "HttpNfcLease" [ 1068.681883] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1068.688994] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1068.688994] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f578eb-e0e6-92ab-a869-d67967fb01d9" [ 1068.688994] env[69994]: _type = "HttpNfcLease" [ 1068.688994] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1068.693945] env[69994]: DEBUG nova.compute.manager [req-37b62511-2eea-4dc0-a72b-f58a84fe23ce req-57a482b9-17c2-4696-8d35-14d5774c038e service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Received event network-vif-deleted-37af0480-c14f-4941-b963-b25c22c833b3 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.694153] env[69994]: INFO nova.compute.manager [req-37b62511-2eea-4dc0-a72b-f58a84fe23ce req-57a482b9-17c2-4696-8d35-14d5774c038e service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Neutron deleted interface 37af0480-c14f-4941-b963-b25c22c833b3; detaching it from the instance and deleting it from the info cache [ 1068.694382] env[69994]: DEBUG nova.network.neutron [req-37b62511-2eea-4dc0-a72b-f58a84fe23ce req-57a482b9-17c2-4696-8d35-14d5774c038e service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.785064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.786120] env[69994]: DEBUG nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1068.789691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.205s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.789902] env[69994]: DEBUG nova.objects.instance [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lazy-loading 'resources' on Instance uuid 850930f9-d5fb-4546-9796-30e164a1cdd3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.141039] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.141332] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08711c1f-7ee1-4083-aa96-ec10fa1ca886 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.148689] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1069.148689] env[69994]: value = "task-3242459" [ 1069.148689] env[69994]: _type = "Task" [ 1069.148689] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.159997] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242457, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.163425] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1069.163666] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.164416] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b8e244-eec3-46a0-85f4-18965131d265 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.170690] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.170941] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2a7abbf-b229-4af8-aa56-7ac40844ad50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.178029] env[69994]: DEBUG nova.network.neutron [-] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.190188] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1069.190188] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f578eb-e0e6-92ab-a869-d67967fb01d9" [ 1069.190188] env[69994]: _type = "HttpNfcLease" [ 1069.190188] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1069.190603] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1069.190603] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f578eb-e0e6-92ab-a869-d67967fb01d9" [ 1069.190603] env[69994]: _type = "HttpNfcLease" [ 1069.190603] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1069.191394] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898159d3-dbea-4640-8aaf-aa1af7f81ff1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.199286] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528daad8-5576-735d-1c54-a4d54e61b02b/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1069.199462] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528daad8-5576-735d-1c54-a4d54e61b02b/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1069.200779] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a8b8bce-ff6f-453e-8570-26d137ea1e51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.259534] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1069.259790] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1069.259972] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleting the datastore file [datastore2] fc31da72-d09e-415e-9866-3e7fc91fec79 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.261488] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad13efcf-afef-4a2c-8504-b8cea0b31fb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.266025] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cf62ff-fed9-4099-b368-00439d8e97fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.281340] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1069.281340] env[69994]: value = "task-3242461" [ 1069.281340] env[69994]: _type = "Task" [ 1069.281340] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.288517] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242461, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.299036] env[69994]: DEBUG nova.compute.utils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1069.302753] env[69994]: DEBUG nova.compute.manager [req-37b62511-2eea-4dc0-a72b-f58a84fe23ce req-57a482b9-17c2-4696-8d35-14d5774c038e service nova] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Detach interface failed, port_id=37af0480-c14f-4941-b963-b25c22c833b3, reason: Instance ef410b09-8686-409e-8391-d50cd0e0df04 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1069.303654] env[69994]: DEBUG nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1069.303825] env[69994]: DEBUG nova.network.neutron [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1069.312173] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1bc2b41e-dfbe-4656-95b3-ce58fcf64484 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.356422] env[69994]: DEBUG nova.policy [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb1874902bc24959b717674a99e530a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee188ea80c9847188df8b8482b7c6ec7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1069.593390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52037acd-5fbd-4489-a037-2963332641b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.605937] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d02384-7879-4ada-8f60-f7171b0ee108 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.641094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e673b32f-0fae-4783-b458-de4f21eb8436 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.649975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-69e2c024-8c3b-4e58-9b9e-bb0622e4bb29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.668879] env[69994]: DEBUG nova.compute.provider_tree [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.677698] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242457, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.520679} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.677698] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c/0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1069.677698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1069.677698] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6924799-c2db-4828-bbe5-35f357593031 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.681225] env[69994]: INFO nova.compute.manager [-] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Took 1.56 seconds to deallocate network for instance. [ 1069.684664] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1069.684664] env[69994]: value = "task-3242462" [ 1069.684664] env[69994]: _type = "Task" [ 1069.684664] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.698734] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242462, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.747635] env[69994]: DEBUG nova.network.neutron [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Successfully created port: ecc3d187-2e74-4aab-9518-f073b26b0101 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1069.791623] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132841} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.791762] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1069.791922] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1069.792264] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1069.803948] env[69994]: DEBUG nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1069.816318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b3a8b1-9eab-4470-a72c-f795c31fdbb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.836559] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance 'ab99499b-21a2-465b-9975-4e0adb18df94' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1070.177746] env[69994]: DEBUG nova.scheduler.client.report [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.189275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.198953] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06359} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.199230] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.200026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febe18b9-77c6-433f-9aab-ef4813c9375d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.224089] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c/0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.224896] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd1bac82-14a7-4fbb-872e-eb2f5a5f91cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.244197] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1070.244197] env[69994]: value = "task-3242463" [ 1070.244197] env[69994]: _type = "Task" [ 1070.244197] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.252523] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242463, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.344222] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.345348] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-003d9681-5a93-49cb-9371-208612ca7f8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.353632] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1070.353632] env[69994]: value = "task-3242464" [ 1070.353632] env[69994]: _type = "Task" [ 1070.353632] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.363368] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242464, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.684064] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.686355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.983s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.687972] env[69994]: INFO nova.compute.claims [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1070.705827] env[69994]: INFO nova.scheduler.client.report [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Deleted allocations for instance 850930f9-d5fb-4546-9796-30e164a1cdd3 [ 1070.754742] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242463, 'name': ReconfigVM_Task, 'duration_secs': 0.304569} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.756215] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c/0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1070.757039] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5232cb35-41a3-41e9-a181-5a10a31cd2d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.765198] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1070.765198] env[69994]: value = "task-3242465" [ 1070.765198] env[69994]: _type = "Task" [ 1070.765198] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.775051] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242465, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.815243] env[69994]: DEBUG nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1070.839498] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1070.839738] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1070.839895] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1070.840097] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1070.840247] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1070.840391] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1070.840617] env[69994]: DEBUG nova.virt.hardware 
[None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1070.840869] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1070.841054] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1070.841223] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1070.841412] env[69994]: DEBUG nova.virt.hardware [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1070.843632] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4074e0-73ad-4a04-8b37-40879bf68d93 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.853636] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1070.853885] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1070.854464] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 
tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1070.854464] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1070.854464] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1070.854619] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1070.854751] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1070.854914] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1070.855103] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1070.855280] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1070.855452] env[69994]: DEBUG nova.virt.hardware [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1070.856829] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f0da17-1d98-489f-8335-1f267f86687d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.861892] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e8d879-df9e-4db7-9a5f-6fd63190c5f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.882090] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 
tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:c2:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49963331-a486-495f-a065-cbcd2c380941', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1070.889714] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1070.890065] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242464, 'name': PowerOffVM_Task, 'duration_secs': 0.208804} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.891333] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974f8a1c-edc1-4cc6-b78b-4c42ee4e4672 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.895370] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1070.895671] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.895858] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance 'ab99499b-21a2-465b-9975-4e0adb18df94' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1070.899782] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65202388-f7c0-4b4b-acee-760e68dae6e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.926615] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1070.926615] env[69994]: value = "task-3242466" [ 1070.926615] env[69994]: _type = "Task" [ 1070.926615] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.934984] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242466, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.214024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d3724d59-c68e-49fb-acfe-5cfbf52d668a tempest-ServersTestJSON-2114336951 tempest-ServersTestJSON-2114336951-project-member] Lock "850930f9-d5fb-4546-9796-30e164a1cdd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.250s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.281276] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242465, 'name': Rename_Task, 'duration_secs': 0.187582} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.282293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1071.282794] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36fd9ccf-2a15-4921-a3da-a064833cc5f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.290812] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1071.290812] env[69994]: value = "task-3242467" [ 1071.290812] env[69994]: _type = "Task" [ 1071.290812] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.303462] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242467, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.417199] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1071.417477] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.417630] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1071.417813] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.417960] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1071.418277] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1071.418504] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1071.418738] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1071.418874] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1071.419059] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1071.419360] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1071.424797] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88e6c02f-1509-4d5b-97d5-868656a41149 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.447170] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242466, 'name': CreateVM_Task, 'duration_secs': 0.357043} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.448869] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1071.449228] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1071.449228] env[69994]: value = "task-3242468" [ 1071.449228] env[69994]: _type = "Task" [ 1071.449228] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.450089] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.450650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.450760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1071.451024] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24331713-e93f-4a17-b7f8-52a6ef2d4b21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.467194] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1071.467194] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52214086-d5ed-0f15-6707-45188425a6c5" [ 1071.467194] env[69994]: _type = "Task" [ 1071.467194] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.471054] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242468, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.480886] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52214086-d5ed-0f15-6707-45188425a6c5, 'name': SearchDatastore_Task, 'duration_secs': 0.010266} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.483553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.483553] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.483553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.483553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.483553] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.483553] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b42120e6-4bff-4a06-9412-7a2cfd98b7c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.492643] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.492643] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1071.492643] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6265737-2393-48b8-acbe-0290142a1841 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.500060] env[69994]: DEBUG nova.compute.manager [req-9783bde3-b088-4072-9d37-d7706ddee2a1 req-7f52023b-8d1a-45f9-822b-a13d65e07239 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Received event network-vif-plugged-ecc3d187-2e74-4aab-9518-f073b26b0101 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1071.500305] env[69994]: DEBUG oslo_concurrency.lockutils [req-9783bde3-b088-4072-9d37-d7706ddee2a1 req-7f52023b-8d1a-45f9-822b-a13d65e07239 service nova] Acquiring lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.500859] env[69994]: DEBUG oslo_concurrency.lockutils [req-9783bde3-b088-4072-9d37-d7706ddee2a1 req-7f52023b-8d1a-45f9-822b-a13d65e07239 service nova] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.501086] env[69994]: DEBUG oslo_concurrency.lockutils [req-9783bde3-b088-4072-9d37-d7706ddee2a1 req-7f52023b-8d1a-45f9-822b-a13d65e07239 service nova] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.501281] env[69994]: DEBUG nova.compute.manager [req-9783bde3-b088-4072-9d37-d7706ddee2a1 req-7f52023b-8d1a-45f9-822b-a13d65e07239 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] No waiting events found dispatching network-vif-plugged-ecc3d187-2e74-4aab-9518-f073b26b0101 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1071.501454] env[69994]: WARNING nova.compute.manager [req-9783bde3-b088-4072-9d37-d7706ddee2a1 req-7f52023b-8d1a-45f9-822b-a13d65e07239 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Received unexpected event network-vif-plugged-ecc3d187-2e74-4aab-9518-f073b26b0101 for instance with vm_state building and task_state spawning. [ 1071.505512] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1071.505512] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52472b2d-2f1f-4b84-a702-f0085d637ad4" [ 1071.505512] env[69994]: _type = "Task" [ 1071.505512] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.514085] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52472b2d-2f1f-4b84-a702-f0085d637ad4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.611835] env[69994]: DEBUG nova.network.neutron [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Successfully updated port: ecc3d187-2e74-4aab-9518-f073b26b0101 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1071.803091] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242467, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.956381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35222e8-0d1f-4ce1-ae48-207343fcb97a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.964853] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242468, 'name': ReconfigVM_Task, 'duration_secs': 0.22547} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.966939] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance 'ab99499b-21a2-465b-9975-4e0adb18df94' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1071.971787] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d85372-653e-46e1-9d26-c255e4049bd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.012061] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0a3dbe-4eb3-488d-892b-2010a9e5e81b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.023315] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52472b2d-2f1f-4b84-a702-f0085d637ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.012851} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.029304] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-498aa7a9-e979-4d72-a1a1-157d321f4077 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.033677] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac40422f-14c6-430e-87a5-f772f66d67b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.044781] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1072.044781] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e92ca9-9ea2-717d-b341-38e6bc1d54c2" [ 1072.044781] env[69994]: _type = "Task" [ 1072.044781] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.054293] env[69994]: DEBUG nova.compute.provider_tree [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.064671] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e92ca9-9ea2-717d-b341-38e6bc1d54c2, 'name': SearchDatastore_Task, 'duration_secs': 0.013856} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.065536] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.065871] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1072.066070] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-717ea2e9-4238-4639-b39b-6a4e6b580cbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.072962] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1072.072962] env[69994]: value = "task-3242469" [ 1072.072962] env[69994]: _type = "Task" [ 1072.072962] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.081608] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242469, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.114034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-1a5b269f-5ee8-4bcc-812e-78388edb1e50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.114034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-1a5b269f-5ee8-4bcc-812e-78388edb1e50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1072.114034] env[69994]: DEBUG nova.network.neutron [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1072.305969] env[69994]: DEBUG oslo_vmware.api [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242467, 'name': PowerOnVM_Task, 'duration_secs': 0.528874} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.306439] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1072.306788] env[69994]: INFO nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Took 9.93 seconds to spawn the instance on the hypervisor. 
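[editor's note] The PowerOnVM_Task sequence above (task created, "Waiting for the task", progress polled by _poll_task, then "completed successfully" and "Powered on the VM") is the standard oslo.vmware task-wait cycle. A minimal standalone sketch of that cycle follows; the vCenter host, credentials and the way the VM reference is picked are placeholders for illustration, not values or code taken from this log or from Nova itself.

# Sketch of the wait_for_task cycle seen in the log above (assumed
# endpoint/credentials; not Nova's implementation).
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Fetch VirtualMachine managed-object references (RetrievePropertiesEx,
# as invoked repeatedly in the log) and pick one purely for illustration.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Start the power-on task, then block while oslo.vmware polls it
# (the "progress is N%" lines) until vCenter reports success or error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
[end editor's note]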
[ 1072.307118] env[69994]: DEBUG nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1072.308251] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46622f8-c710-4aa4-bba8-4232b36c4fbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.483225] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1072.483225] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.483225] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.483225] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.483225] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.483690] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1072.484161] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1072.484482] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1072.484903] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1072.488017] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1072.488017] env[69994]: DEBUG nova.virt.hardware [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1072.491681] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1072.492266] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffc20f86-873d-4fb6-9cd3-4d8e3c1734ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.520026] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1072.520026] env[69994]: value = "task-3242470" [ 1072.520026] env[69994]: _type = "Task" [ 1072.520026] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.530874] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242470, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.560803] env[69994]: DEBUG nova.scheduler.client.report [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1072.584439] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242469, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.649672] env[69994]: DEBUG nova.network.neutron [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1072.805975] env[69994]: DEBUG nova.network.neutron [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Updating instance_info_cache with network_info: [{"id": "ecc3d187-2e74-4aab-9518-f073b26b0101", "address": "fa:16:3e:37:61:c7", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecc3d187-2e", "ovs_interfaceid": "ecc3d187-2e74-4aab-9518-f073b26b0101", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.830978] env[69994]: INFO nova.compute.manager [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Took 26.51 seconds to build instance. 
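[editor's note] The recurring "Lock ... acquired by ... waited N.NNNs" / "'released' ... held N.NNNs" messages (the compute_resources, refresh_cache-* and datastore image-cache locks above) come from oslo.concurrency's lockutils. A minimal sketch of the two common forms follows; the function bodies and the exact lock-name strings are illustrative only, not Nova's code.

# Sketch of the oslo.concurrency locking pattern behind the
# "Lock ... acquired by ..." messages in this log (illustrative names).
from oslo_concurrency import lockutils

def refresh_cache(instance_uuid):
    # Context-manager form: serialize rebuilds of one instance's
    # network info cache, as the refresh_cache-<uuid> lock does above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the cache entry here

@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    # Decorator form: every call takes the same named lock, mirroring
    # the ResourceTracker-style "compute_resources" lock seen above.
    pass  # update resource usage under the lock

refresh_cache('1a5b269f-5ee8-4bcc-812e-78388edb1e50')
claim('1a5b269f-5ee8-4bcc-812e-78388edb1e50')
[end editor's note]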
[ 1073.031031] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242470, 'name': ReconfigVM_Task, 'duration_secs': 0.226243} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.031401] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1073.032398] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d245d6-b584-4105-8fb1-2c32ac72763e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.060650] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] ab99499b-21a2-465b-9975-4e0adb18df94/ab99499b-21a2-465b-9975-4e0adb18df94.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.061064] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80099d25-5794-4b08-ac16-aa89a18cd0d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.075870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.076720] env[69994]: DEBUG nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1073.079736] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.409s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.081813] env[69994]: DEBUG nova.objects.instance [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lazy-loading 'resources' on Instance uuid 29071eb9-6334-4c23-acb4-142c12aa448d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.090792] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541424} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.092275] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1073.092542] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1073.092955] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1073.092955] env[69994]: value = "task-3242471" [ 1073.092955] env[69994]: _type = "Task" [ 1073.092955] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.093674] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e3a920f-d877-4404-a5a6-40e572484de9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.106556] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242471, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.109659] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1073.109659] env[69994]: value = "task-3242472" [ 1073.109659] env[69994]: _type = "Task" [ 1073.109659] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.120103] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.312668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-1a5b269f-5ee8-4bcc-812e-78388edb1e50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.313106] env[69994]: DEBUG nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Instance network_info: |[{"id": "ecc3d187-2e74-4aab-9518-f073b26b0101", "address": "fa:16:3e:37:61:c7", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecc3d187-2e", "ovs_interfaceid": "ecc3d187-2e74-4aab-9518-f073b26b0101", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1073.313585] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:61:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecc3d187-2e74-4aab-9518-f073b26b0101', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1073.321450] env[69994]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1073.321713] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1073.322100] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a379955d-d5ac-4c35-ac75-9aacfc6da69d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.338404] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eff05dbc-1f8b-45c0-8394-f3d30ebe52ea tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.030s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.349584] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1073.349584] env[69994]: value = "task-3242473" [ 1073.349584] env[69994]: _type = "Task" [ 1073.349584] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.360616] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242473, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.593043] env[69994]: DEBUG nova.compute.utils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1073.593043] env[69994]: DEBUG nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1073.593043] env[69994]: DEBUG nova.network.neutron [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1073.604691] env[69994]: DEBUG nova.compute.manager [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Received event network-changed-ecc3d187-2e74-4aab-9518-f073b26b0101 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1073.606642] env[69994]: DEBUG nova.compute.manager [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Refreshing instance network info cache due to event network-changed-ecc3d187-2e74-4aab-9518-f073b26b0101. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1073.606642] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] Acquiring lock "refresh_cache-1a5b269f-5ee8-4bcc-812e-78388edb1e50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.606642] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] Acquired lock "refresh_cache-1a5b269f-5ee8-4bcc-812e-78388edb1e50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.606790] env[69994]: DEBUG nova.network.neutron [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Refreshing network info cache for port ecc3d187-2e74-4aab-9518-f073b26b0101 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1073.611582] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242471, 'name': ReconfigVM_Task, 'duration_secs': 0.366918} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.615651] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Reconfigured VM instance instance-0000005a to attach disk [datastore1] ab99499b-21a2-465b-9975-4e0adb18df94/ab99499b-21a2-465b-9975-4e0adb18df94.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1073.615835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance 'ab99499b-21a2-465b-9975-4e0adb18df94' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1073.627555] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073453} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.627667] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1073.628737] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f2243b-e3b4-453a-9303-f0407847cf2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.665518] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.667333] env[69994]: DEBUG nova.policy [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08a2b92b6c0141a6a7e301e064032289', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38d5a89ed7c248c3be506ef12caf5f1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1073.671813] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0584d8d4-9a85-4388-ac04-158a2ddb3873 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.695746] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1073.695746] env[69994]: value = "task-3242474" [ 1073.695746] env[69994]: _type = "Task" [ 1073.695746] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.711611] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242474, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.859654] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242473, 'name': CreateVM_Task, 'duration_secs': 0.499538} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.859855] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1073.860396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.860587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.860986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1073.861166] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70b97f9b-dcec-4ebc-acc4-c30a1526442e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.870083] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1073.870083] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d09eee-461a-9eda-a729-c626e8f3a969" [ 1073.870083] env[69994]: _type = "Task" [ 1073.870083] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.880090] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d09eee-461a-9eda-a729-c626e8f3a969, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.942736] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f10ac33-0990-48ef-94d0-af32d121db6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.951435] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73956cd2-71c4-4b24-abc1-46829f11ed1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.985185] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9fe63d-24f9-4f26-a87b-6aca7bb266ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.994528] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812ef396-0d82-4d82-b580-582fb11ad413 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.009459] env[69994]: DEBUG nova.compute.provider_tree [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.065053] env[69994]: DEBUG nova.network.neutron [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Successfully created port: 1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1074.096654] env[69994]: DEBUG nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1074.125693] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e1faa6-c444-48ba-8885-8b82a68c59da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.170221] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7511f4-a79e-4353-936d-343cf93d436a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.189403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance 'ab99499b-21a2-465b-9975-4e0adb18df94' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1074.210815] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242474, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.369627] env[69994]: DEBUG nova.network.neutron [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Updated VIF entry in instance network info cache for port ecc3d187-2e74-4aab-9518-f073b26b0101. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1074.370017] env[69994]: DEBUG nova.network.neutron [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Updating instance_info_cache with network_info: [{"id": "ecc3d187-2e74-4aab-9518-f073b26b0101", "address": "fa:16:3e:37:61:c7", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecc3d187-2e", "ovs_interfaceid": "ecc3d187-2e74-4aab-9518-f073b26b0101", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.384067] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': 
session[520ca315-cd17-8670-37df-715bbcc23663]52d09eee-461a-9eda-a729-c626e8f3a969, 'name': SearchDatastore_Task, 'duration_secs': 0.016734} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.384397] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.384632] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.384870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.385021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.385203] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.385730] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9e12dc3-98dd-4b63-96c7-6f44c13515b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.396910] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.397393] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1074.397998] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb998ee-7b52-4876-84ef-f44672629f19 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.404168] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1074.404168] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528cd604-f444-b732-0b91-d936b416bf1a" [ 1074.404168] env[69994]: _type = "Task" [ 1074.404168] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.413209] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528cd604-f444-b732-0b91-d936b416bf1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.514671] env[69994]: DEBUG nova.scheduler.client.report [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1074.710125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.710628] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.717141] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242474, 'name': ReconfigVM_Task, 'duration_secs': 0.527576} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.718141] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Reconfigured VM instance instance-0000005c to attach disk [datastore1] fc31da72-d09e-415e-9866-3e7fc91fec79/fc31da72-d09e-415e-9866-3e7fc91fec79.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1074.719911] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f9dafa5-1dfb-4008-a35c-aa1735d04177 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.731025] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1074.731025] env[69994]: value = "task-3242475" [ 1074.731025] env[69994]: _type = "Task" [ 1074.731025] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.738827] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242475, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.748284] env[69994]: DEBUG nova.network.neutron [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Port 92378003-993a-43f2-8823-55a4b83acdef binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1074.877228] env[69994]: DEBUG oslo_concurrency.lockutils [req-9a37bd58-ce9e-4fd8-a293-c9a87f9d2bb5 req-efe2caaf-b8da-4ef3-83bc-2375c15bad38 service nova] Releasing lock "refresh_cache-1a5b269f-5ee8-4bcc-812e-78388edb1e50" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.918516] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528cd604-f444-b732-0b91-d936b416bf1a, 'name': SearchDatastore_Task, 'duration_secs': 0.01008} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.919411] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a1570cd-fa82-4805-b06c-5916200df7ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.924968] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1074.924968] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a71ce8-127f-28d2-5433-d389d9ea5f80" [ 1074.924968] env[69994]: _type = "Task" [ 1074.924968] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.933534] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a71ce8-127f-28d2-5433-d389d9ea5f80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.023853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.024464] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.028s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.050175] env[69994]: INFO nova.scheduler.client.report [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Deleted allocations for instance 29071eb9-6334-4c23-acb4-142c12aa448d [ 1075.106822] env[69994]: DEBUG nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1075.131080] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1075.131353] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.131531] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1075.131719] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.131883] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1075.132050] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1075.132284] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1075.132444] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1075.132613] env[69994]: DEBUG nova.virt.hardware [None 
req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1075.132790] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1075.132966] env[69994]: DEBUG nova.virt.hardware [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1075.133872] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb27311-3e6d-4937-96d5-4529d27a9aa0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.142682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869de306-8ff3-4fb6-81ed-dade20472810 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.218729] env[69994]: DEBUG nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1075.238558] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242475, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.437056] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a71ce8-127f-28d2-5433-d389d9ea5f80, 'name': SearchDatastore_Task, 'duration_secs': 0.018161} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.437056] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.437056] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 1a5b269f-5ee8-4bcc-812e-78388edb1e50/1a5b269f-5ee8-4bcc-812e-78388edb1e50.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1075.437363] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f427b2a-33e9-4cba-adbe-0f0011438fb8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.444040] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1075.444040] env[69994]: value = "task-3242476" [ 1075.444040] env[69994]: _type = "Task" [ 1075.444040] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.451526] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242476, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.531522] env[69994]: INFO nova.compute.claims [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1075.559042] env[69994]: DEBUG oslo_concurrency.lockutils [None req-335c38ea-237c-4ee6-b277-fd942030f610 tempest-AttachVolumeTestJSON-729346381 tempest-AttachVolumeTestJSON-729346381-project-member] Lock "29071eb9-6334-4c23-acb4-142c12aa448d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.651s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.672037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.672373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.672632] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.675123] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.675451] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.677762] env[69994]: INFO nova.compute.manager [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Terminating instance [ 1075.740120] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 
tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242475, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.743360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.772124] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.772727] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.773061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.954453] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242476, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463229} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.954721] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 1a5b269f-5ee8-4bcc-812e-78388edb1e50/1a5b269f-5ee8-4bcc-812e-78388edb1e50.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1075.954937] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1075.955237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e6a912c-de3f-4e85-bb1e-b5f23e2b3775 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.960862] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1075.960862] env[69994]: value = "task-3242477" [ 1075.960862] env[69994]: _type = "Task" [ 1075.960862] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.968405] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242477, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.039141] env[69994]: INFO nova.compute.resource_tracker [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating resource usage from migration 333534b2-bb77-4949-8bf8-98da2d12ec07 [ 1076.185286] env[69994]: DEBUG nova.compute.manager [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1076.185512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1076.186386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af048dd9-b4e2-4514-b046-e4d01b484275 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.195435] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.195990] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-126ab024-608a-4274-897a-73093717bc6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.201336] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528daad8-5576-735d-1c54-a4d54e61b02b/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1076.202181] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b63292-3c7b-4319-8121-babba2b33b38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.208161] env[69994]: DEBUG oslo_vmware.api [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 1076.208161] env[69994]: value = "task-3242478" [ 1076.208161] env[69994]: _type = "Task" [ 1076.208161] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.213442] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528daad8-5576-735d-1c54-a4d54e61b02b/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1076.213606] env[69994]: ERROR oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528daad8-5576-735d-1c54-a4d54e61b02b/disk-0.vmdk due to incomplete transfer. 
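The "Invoking <Type>.<X>_Task", "Waiting for the task: (returnval){...}" and "Task: {...} completed successfully" triplets that recur throughout this log come from oslo.vmware's submit-then-poll pattern: the driver issues a vSphere *_Task call and then polls the task object until vCenter reports completion. The minimal Python sketch below shows that pattern in isolation; it is not Nova's own implementation, and the vCenter host, credentials, the power_off() helper and the vm_ref placeholder are illustrative assumptions only.

    # Sketch of the oslo.vmware submit-then-poll pattern seen in the log above.
    from oslo_vmware import api


    def power_off(session, vm_ref):
        """Submit PowerOffVM_Task for vm_ref and block until vCenter finishes it."""
        # invoke_api() on session.vim issues the SOAP call; this is what produces the
        # "Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-..." entries.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task, emitting "Waiting for the task" and
        # "Task: {...} progress is N%" entries until it completes or raises on error.
        return session.wait_for_task(task)


    if __name__ == '__main__':
        # Placeholder endpoint and credentials (assumptions for illustration only);
        # task_poll_interval controls how often the progress lines appear.
        session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                       api_retry_count=10, task_poll_interval=0.5)
        vm_ref = None  # assume a VirtualMachine managed-object ref obtained elsewhere
        power_off(session, vm_ref)
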
[ 1076.214383] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8255cc9d-6bd7-4ec5-97d3-cfb113581753 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.218811] env[69994]: DEBUG oslo_vmware.api [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242478, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.222327] env[69994]: DEBUG oslo_vmware.rw_handles [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528daad8-5576-735d-1c54-a4d54e61b02b/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1076.222517] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Uploaded image a9ff1b20-7b93-4d2f-b2ea-ac788dd27141 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1076.224965] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1076.225215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-322db519-bba2-4815-855f-9b08e084d6e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.230371] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1076.230371] env[69994]: value = "task-3242479" [ 1076.230371] env[69994]: _type = "Task" [ 1076.230371] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.242891] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242475, 'name': Rename_Task, 'duration_secs': 1.239943} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.245961] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1076.246234] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242479, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.246441] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfb83397-1cfa-4a07-b11c-3f3c546bb7b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.251902] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1076.251902] env[69994]: value = "task-3242480" [ 1076.251902] env[69994]: _type = "Task" [ 1076.251902] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.260013] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242480, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.294376] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f40cb6-99c4-4b79-892e-3cb848e57bc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.302232] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969a3c30-7b11-42a4-a8f7-f431e0a20d44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.340896] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be517b9-6f16-4a3e-bd44-1da8fb7c0682 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.344869] env[69994]: DEBUG nova.compute.manager [req-0c9301c6-459e-4088-8774-8a893dbe2d0d req-6475d981-d473-45d1-af48-496dfe2243ec service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Received event network-vif-plugged-1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1076.346185] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c9301c6-459e-4088-8774-8a893dbe2d0d req-6475d981-d473-45d1-af48-496dfe2243ec service nova] Acquiring lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.346185] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c9301c6-459e-4088-8774-8a893dbe2d0d req-6475d981-d473-45d1-af48-496dfe2243ec service nova] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.346185] env[69994]: DEBUG oslo_concurrency.lockutils [req-0c9301c6-459e-4088-8774-8a893dbe2d0d req-6475d981-d473-45d1-af48-496dfe2243ec service nova] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.346185] env[69994]: DEBUG nova.compute.manager [req-0c9301c6-459e-4088-8774-8a893dbe2d0d req-6475d981-d473-45d1-af48-496dfe2243ec service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] No waiting events found dispatching network-vif-plugged-1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1076.346185] env[69994]: WARNING nova.compute.manager [req-0c9301c6-459e-4088-8774-8a893dbe2d0d req-6475d981-d473-45d1-af48-496dfe2243ec service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Received unexpected event network-vif-plugged-1a71caa6-eaba-4605-b4de-9df7bfa68007 for instance with vm_state building and task_state spawning. [ 1076.351591] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6aa1cf8-d8a9-4c99-a4db-ef07d3cbc5a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.366938] env[69994]: DEBUG nova.compute.provider_tree [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1076.471684] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090187} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.472103] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.472915] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde94131-4c24-44f9-99be-48052b972ec4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.501089] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 1a5b269f-5ee8-4bcc-812e-78388edb1e50/1a5b269f-5ee8-4bcc-812e-78388edb1e50.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.502116] env[69994]: DEBUG nova.network.neutron [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Successfully updated port: 1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1076.503341] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-716aaf32-d1d7-4947-9ed5-60abf6b0f7a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.520197] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.520521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.520521] env[69994]: DEBUG nova.network.neutron [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.527725] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1076.527725] env[69994]: value = "task-3242482" [ 1076.527725] env[69994]: _type = "Task" [ 1076.527725] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.536968] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242482, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.719513] env[69994]: DEBUG oslo_vmware.api [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242478, 'name': PowerOffVM_Task, 'duration_secs': 0.23126} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.719777] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.719945] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.720202] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c94615dc-dea0-4aa6-b195-cc0b5519d2e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.742564] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242479, 'name': Destroy_Task, 'duration_secs': 0.339715} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.742824] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Destroyed the VM [ 1076.743080] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1076.743331] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ec24ee9d-d266-4c69-b851-443cb56cf16a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.756620] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1076.756620] env[69994]: value = "task-3242484" [ 1076.756620] env[69994]: _type = "Task" [ 1076.756620] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.766583] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242480, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.771630] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242484, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.816823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.817038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.817199] env[69994]: DEBUG nova.network.neutron [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.854034] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.854034] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.854034] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Deleting the datastore file [datastore1] 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.854295] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39eb42aa-a3f3-43b1-889f-1633816d1179 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.861598] env[69994]: DEBUG oslo_vmware.api [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for the task: (returnval){ [ 
1076.861598] env[69994]: value = "task-3242485" [ 1076.861598] env[69994]: _type = "Task" [ 1076.861598] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.872236] env[69994]: DEBUG oslo_vmware.api [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.890334] env[69994]: ERROR nova.scheduler.client.report [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [req-c08f6d6a-b358-4f95-9033-e96c552a4ae5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 119, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92ce3c95-4efe-4d04-802b-6b187afc5aa7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c08f6d6a-b358-4f95-9033-e96c552a4ae5"}]} [ 1076.910346] env[69994]: DEBUG nova.scheduler.client.report [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1076.925233] env[69994]: DEBUG nova.scheduler.client.report [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1076.925540] env[69994]: DEBUG nova.compute.provider_tree [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1076.937636] env[69994]: DEBUG nova.scheduler.client.report [None 
req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1076.955853] env[69994]: DEBUG nova.scheduler.client.report [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1077.037222] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242482, 'name': ReconfigVM_Task, 'duration_secs': 0.457977} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.039865] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 1a5b269f-5ee8-4bcc-812e-78388edb1e50/1a5b269f-5ee8-4bcc-812e-78388edb1e50.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.040781] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c72df40d-d224-4ce2-8f7d-c2767106ddee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.047080] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1077.047080] env[69994]: value = "task-3242486" [ 1077.047080] env[69994]: _type = "Task" [ 1077.047080] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.060619] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242486, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.087023] env[69994]: DEBUG nova.network.neutron [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1077.187397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67ba5f4-8de6-4b5a-9f6a-b04efc056624 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.197228] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3183d1d-0d34-4fad-ad51-ede867788100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.229180] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b2f20f-567d-46a4-93e4-937f50fabba3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.236902] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c59ce4-1434-4e85-87ef-c738336077c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.250300] env[69994]: DEBUG nova.compute.provider_tree [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.260497] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242480, 'name': PowerOnVM_Task, 'duration_secs': 0.519066} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.265821] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1077.266058] env[69994]: DEBUG nova.compute.manager [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1077.269236] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4409961b-e855-4b01-87ab-9270431638c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.276576] env[69994]: DEBUG oslo_vmware.api [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242484, 'name': RemoveSnapshot_Task, 'duration_secs': 0.452917} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.278121] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1077.278358] env[69994]: INFO nova.compute.manager [None req-da2cc310-cc90-4394-ac43-a8e76a754ffc tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Took 12.79 seconds to snapshot the instance on the hypervisor. [ 1077.323072] env[69994]: DEBUG nova.network.neutron [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Updating instance_info_cache with network_info: [{"id": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "address": "fa:16:3e:6e:82:b8", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a71caa6-ea", "ovs_interfaceid": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.371899] env[69994]: DEBUG oslo_vmware.api [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Task: {'id': task-3242485, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149031} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.372161] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.372345] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.372522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.372692] env[69994]: INFO nova.compute.manager [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1077.373015] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1077.373243] env[69994]: DEBUG nova.compute.manager [-] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1077.373369] env[69994]: DEBUG nova.network.neutron [-] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1077.563456] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242486, 'name': Rename_Task, 'duration_secs': 0.383198} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.564070] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1077.564577] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57f6b478-52a6-4523-bc59-9923dc12f17e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.571019] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1077.571019] env[69994]: value = "task-3242487" [ 1077.571019] env[69994]: _type = "Task" [ 1077.571019] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.579341] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242487, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.591839] env[69994]: DEBUG nova.network.neutron [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance_info_cache with network_info: [{"id": "92378003-993a-43f2-8823-55a4b83acdef", "address": "fa:16:3e:a4:fe:80", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92378003-99", "ovs_interfaceid": "92378003-993a-43f2-8823-55a4b83acdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.757077] env[69994]: DEBUG nova.scheduler.client.report [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1077.786674] env[69994]: INFO nova.compute.manager [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] bringing vm to original state: 'stopped' [ 1077.825788] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.826112] env[69994]: DEBUG nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Instance network_info: |[{"id": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "address": "fa:16:3e:6e:82:b8", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a71caa6-ea", "ovs_interfaceid": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1077.826518] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:82:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a71caa6-eaba-4605-b4de-9df7bfa68007', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.834701] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1077.835172] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.835406] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-708692e0-9fc5-4052-bf65-360c163d91bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.855439] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.855439] env[69994]: value = "task-3242488" [ 1077.855439] env[69994]: _type = "Task" [ 1077.855439] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.863039] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242488, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.025051] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "f00662a9-92e0-4520-9ced-3cfd6e83628b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.025390] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.025655] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "f00662a9-92e0-4520-9ced-3cfd6e83628b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.026125] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.026179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.028480] env[69994]: INFO nova.compute.manager [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 
tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Terminating instance [ 1078.080501] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242487, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.095320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.264088] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.239s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.264088] env[69994]: INFO nova.compute.manager [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Migrating [ 1078.270096] env[69994]: DEBUG nova.network.neutron [-] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.271156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.838s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.271431] env[69994]: DEBUG nova.objects.instance [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lazy-loading 'resources' on Instance uuid 384889a3-c3d9-4e0e-8d1c-95193cf4343d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.365513] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242488, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.393159] env[69994]: DEBUG nova.compute.manager [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Received event network-changed-1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1078.393159] env[69994]: DEBUG nova.compute.manager [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Refreshing instance network info cache due to event network-changed-1a71caa6-eaba-4605-b4de-9df7bfa68007. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1078.393314] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] Acquiring lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.393465] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] Acquired lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.393651] env[69994]: DEBUG nova.network.neutron [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Refreshing network info cache for port 1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1078.532173] env[69994]: DEBUG nova.compute.manager [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1078.532468] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1078.533489] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fba87ec-3e92-496d-a7f7-cad2b6291648 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.542297] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1078.542590] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79b855c9-50d5-4b6a-8604-cdd51e3a0617 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.561399] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "19fedc80-8def-426a-af73-ad871e127e02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.561666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "19fedc80-8def-426a-af73-ad871e127e02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.581486] env[69994]: DEBUG oslo_vmware.api [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242487, 'name': PowerOnVM_Task, 'duration_secs': 0.831726} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.581710] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1078.581917] env[69994]: INFO nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Took 7.77 seconds to spawn the instance on the hypervisor. [ 1078.582123] env[69994]: DEBUG nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.582873] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4e0686-3de4-40ba-aa9b-f2305ba040b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.609272] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1078.609523] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1078.609727] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleting the datastore file [datastore2] f00662a9-92e0-4520-9ced-3cfd6e83628b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.609998] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-785c8ece-1b53-4b51-a0f3-eeee95956cf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.617560] env[69994]: DEBUG oslo_vmware.api [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1078.617560] env[69994]: value = "task-3242491" [ 1078.617560] env[69994]: _type = "Task" [ 1078.617560] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.621997] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12872c07-827d-49bf-b048-3a3572e1d184 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.629195] env[69994]: DEBUG oslo_vmware.api [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.645381] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10952557-d729-4aac-819d-9baeacdb5319 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.653835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance 'ab99499b-21a2-465b-9975-4e0adb18df94' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1078.777780] env[69994]: INFO nova.compute.manager [-] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Took 1.40 seconds to deallocate network for instance. [ 1078.784640] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.784889] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.784992] env[69994]: DEBUG nova.network.neutron [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1078.794673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "fc31da72-d09e-415e-9866-3e7fc91fec79" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.795394] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.795394] env[69994]: DEBUG nova.compute.manager [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.796043] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b802267-7a7e-4a29-96c9-1012bee8601d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.805258] env[69994]: DEBUG nova.compute.manager [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1078.864228] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242488, 'name': CreateVM_Task, 'duration_secs': 0.526259} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.866654] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1078.867689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.867911] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.868339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1078.868656] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a2cb853-b3c6-41a8-ae40-b073d21e3214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.873502] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1078.873502] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527f5f49-4cbb-2efe-5757-9be4eacceb46" [ 1078.873502] env[69994]: _type = "Task" [ 1078.873502] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.882949] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527f5f49-4cbb-2efe-5757-9be4eacceb46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.986468] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48968526-57ca-40ee-bc54-ce3e7f0a0820 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.993920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384c849b-5f70-4a11-b882-23f344942376 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.025036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d896b9f-11a7-44c1-86bd-dcc3ab2ab879 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.032499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95755bb-e8bb-4bca-9378-12944ff4cca4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.046589] env[69994]: DEBUG nova.compute.provider_tree [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.065723] env[69994]: DEBUG nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1079.096851] env[69994]: INFO nova.compute.manager [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Took 23.45 seconds to build instance. [ 1079.117918] env[69994]: DEBUG nova.network.neutron [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Updated VIF entry in instance network info cache for port 1a71caa6-eaba-4605-b4de-9df7bfa68007. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1079.118304] env[69994]: DEBUG nova.network.neutron [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Updating instance_info_cache with network_info: [{"id": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "address": "fa:16:3e:6e:82:b8", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a71caa6-ea", "ovs_interfaceid": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.128077] env[69994]: DEBUG oslo_vmware.api [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138811} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.128358] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.128586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1079.129016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1079.129090] env[69994]: INFO nova.compute.manager [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Took 0.60 seconds to destroy the instance on the hypervisor. 
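Editor's aside on the 409 logged above (req-c08f6d6a-b358-4f95-9033-e96c552a4ae5, code "placement.concurrent_update", detail "resource provider generation conflict"): this is the expected response when two writers race on the same resource provider. Every inventory PUT to placement must echo the provider generation the caller last read; a stale generation is rejected with 409 so the caller re-reads the provider and retries, which is exactly what the report client does in the subsequent "Refreshing inventories/aggregate associations/trait associations" lines. Below is a minimal sketch of that read-generation/write/retry loop against the placement HTTP API. It uses plain `requests`; the base_url, token, microversion, and retry count are illustrative assumptions, not nova's actual report-client code.

import requests

def put_inventories(base_url, token, provider_uuid, inventories, max_retries=3):
    """Write an inventories dict (e.g. {'VCPU': {...}, 'MEMORY_MB': {...}}) to a
    placement resource provider, retrying on generation conflicts (hypothetical
    helper for illustration only)."""
    headers = {
        "X-Auth-Token": token,
        # Any microversion that supports these routes works; 1.26 is an assumption.
        "OpenStack-API-Version": "placement 1.26",
    }
    for _ in range(max_retries):
        # Re-read the provider to learn its current generation.
        rp = requests.get(
            f"{base_url}/resource_providers/{provider_uuid}", headers=headers)
        rp.raise_for_status()
        generation = rp.json()["generation"]

        # Attempt the write, echoing the generation we just read.
        resp = requests.put(
            f"{base_url}/resource_providers/{provider_uuid}/inventories",
            headers=headers,
            json={"resource_provider_generation": generation,
                  "inventories": inventories},
        )
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the provider
        # generation between our read and write; loop to re-read and retry.
    raise RuntimeError("gave up after repeated placement generation conflicts")

In this trace no second PUT was actually needed: after refreshing, the report client found the cached data already matched ("Inventory has not changed in ProviderTree" / "Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data"), so the conflict resolved without rewriting the inventory.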
[ 1079.129389] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1079.129905] env[69994]: DEBUG nova.compute.manager [-] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1079.130061] env[69994]: DEBUG nova.network.neutron [-] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1079.159811] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1079.160087] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ba45e6d-2878-4ef7-be94-6ddacafb390c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.168013] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1079.168013] env[69994]: value = "task-3242492" [ 1079.168013] env[69994]: _type = "Task" [ 1079.168013] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.174754] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242492, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.292345] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.310201] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.311151] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-019e2167-9465-4060-b764-8c9f6bccc7ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.319224] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1079.319224] env[69994]: value = "task-3242493" [ 1079.319224] env[69994]: _type = "Task" [ 1079.319224] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.330507] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242493, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.384807] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527f5f49-4cbb-2efe-5757-9be4eacceb46, 'name': SearchDatastore_Task, 'duration_secs': 0.009226} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.385318] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.385538] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1079.385780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.385990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.386272] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.386584] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40eb2d0b-a46f-4180-865d-715eac61becf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.396468] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.396727] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1079.397501] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccddf222-ae5b-465a-a1c5-9242db95e705 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.404517] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1079.404517] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5230eaed-b167-0999-5599-5db481f2d9fc" [ 1079.404517] env[69994]: _type = "Task" [ 1079.404517] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.416187] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5230eaed-b167-0999-5599-5db481f2d9fc, 'name': SearchDatastore_Task, 'duration_secs': 0.008391} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.417254] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1cb5234-9f0d-4532-8cd5-545b25dc1718 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.423435] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1079.423435] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a17adb-129b-210e-6b29-d6309de67aad" [ 1079.423435] env[69994]: _type = "Task" [ 1079.423435] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.438430] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a17adb-129b-210e-6b29-d6309de67aad, 'name': SearchDatastore_Task, 'duration_secs': 0.008134} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.441714] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.442088] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1079.442773] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1cb4d2b-da2f-4f2f-ae2c-8f287433cbb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.450394] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1079.450394] env[69994]: value = "task-3242494" [ 1079.450394] env[69994]: _type = "Task" [ 1079.450394] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.460982] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242494, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.549696] env[69994]: DEBUG nova.scheduler.client.report [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.564930] env[69994]: DEBUG nova.network.neutron [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.588684] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.599050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e846394-fe65-4808-b6da-902e9f7be1db tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.964s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.623330] env[69994]: DEBUG oslo_concurrency.lockutils [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] 
Releasing lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.623609] env[69994]: DEBUG nova.compute.manager [req-1f796396-f8e0-47be-94ff-5e8afffcec41 req-30aa286d-06d5-4ee1-9d36-a39dc52bc856 service nova] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Received event network-vif-deleted-5068e2e3-d196-4aea-8f08-ae8d9c0239cc {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.676739] env[69994]: DEBUG oslo_vmware.api [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242492, 'name': PowerOnVM_Task, 'duration_secs': 0.394241} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.677022] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1079.677216] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aea4f570-240f-4602-82bd-d3875804924a tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance 'ab99499b-21a2-465b-9975-4e0adb18df94' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1079.830385] env[69994]: DEBUG oslo_vmware.api [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242493, 'name': PowerOffVM_Task, 'duration_secs': 0.223272} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.830689] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1079.830977] env[69994]: DEBUG nova.compute.manager [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.831905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd77ed0-bcc2-42be-b4ba-782e0a402ab0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.932378] env[69994]: DEBUG nova.network.neutron [-] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.960760] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242494, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489594} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.961065] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1079.961285] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1079.961534] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b1f8400-d528-4135-8249-a6c0b484c6c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.967674] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1079.967674] env[69994]: value = "task-3242495" [ 1079.967674] env[69994]: _type = "Task" [ 1079.967674] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.976286] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242495, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.055050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.784s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.058066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.563s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.058066] env[69994]: DEBUG nova.objects.instance [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lazy-loading 'resources' on Instance uuid 799bf051-86b4-45bd-b9bf-df767074dac8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.067630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.076106] env[69994]: INFO nova.scheduler.client.report [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleted allocations for instance 384889a3-c3d9-4e0e-8d1c-95193cf4343d [ 1080.312050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd3d8a2-a26b-4c0c-862d-c38aa3e33605 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.317484] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b198781b-8d7e-48a7-a30e-b9e39cddd30c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1080.317712] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-7af489bf-684d-45d4-a7e0-a0166c8b1eab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.323524] env[69994]: DEBUG oslo_vmware.api [None req-b198781b-8d7e-48a7-a30e-b9e39cddd30c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1080.323524] env[69994]: value = "task-3242496" [ 1080.323524] 
env[69994]: _type = "Task" [ 1080.323524] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.331869] env[69994]: DEBUG oslo_vmware.api [None req-b198781b-8d7e-48a7-a30e-b9e39cddd30c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242496, 'name': SuspendVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.343839] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.549s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.418664] env[69994]: DEBUG nova.compute.manager [req-26defe95-78f4-44ca-9a0a-a50c53bf2913 req-8db95b6f-513b-448b-b0b7-b090a93c3aeb service nova] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Received event network-vif-deleted-624ca4e4-692a-43e2-a267-b5692e21bb35 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.435378] env[69994]: INFO nova.compute.manager [-] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Took 1.31 seconds to deallocate network for instance. [ 1080.478071] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061116} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.479147] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1080.479962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae4a825-edc8-4bc8-b140-ece124485ca3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.506498] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.507247] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6d30f05-9fff-4555-9109-82b67f1924c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.528542] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1080.528542] env[69994]: value = "task-3242497" [ 1080.528542] env[69994]: _type = "Task" [ 1080.528542] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.537331] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242497, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.588327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-608ef843-d541-4a48-a2e5-1969a4dba344 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "384889a3-c3d9-4e0e-8d1c-95193cf4343d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.558s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.829554] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358a04cf-6f26-4103-8e1f-b029bbc8bc91 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.837642] env[69994]: DEBUG oslo_vmware.api [None req-b198781b-8d7e-48a7-a30e-b9e39cddd30c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242496, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.840564] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75da1c84-e062-44fc-8280-19692fff516d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.877055] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.877866] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ef6fc6-590e-4643-9528-ea0607174b2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.886135] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7baa44e4-9385-40f4-a2a9-bc87344cda5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.901653] env[69994]: DEBUG nova.compute.provider_tree [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.941581] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.040214] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242497, 'name': ReconfigVM_Task, 'duration_secs': 0.491981} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.040456] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Reconfigured VM instance instance-0000005f to attach disk [datastore1] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.041855] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e14a6b4a-00dc-40cd-aef1-7010cd19660f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.051017] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1081.051017] env[69994]: value = "task-3242499" [ 1081.051017] env[69994]: _type = "Task" [ 1081.051017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.059138] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242499, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.334237] env[69994]: DEBUG oslo_vmware.api [None req-b198781b-8d7e-48a7-a30e-b9e39cddd30c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242496, 'name': SuspendVM_Task, 'duration_secs': 0.679028} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.334501] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b198781b-8d7e-48a7-a30e-b9e39cddd30c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1081.334690] env[69994]: DEBUG nova.compute.manager [None req-b198781b-8d7e-48a7-a30e-b9e39cddd30c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1081.335447] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edaa2ef-7f14-4c29-b916-22f4cadf7e3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.404418] env[69994]: DEBUG nova.scheduler.client.report [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1081.560189] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242499, 'name': Rename_Task, 'duration_secs': 0.143795} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.560403] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1081.560634] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb9fc759-d9f0-4707-ac50-4166c6b33812 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.565982] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1081.565982] env[69994]: value = "task-3242500" [ 1081.565982] env[69994]: _type = "Task" [ 1081.565982] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.579920] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242500, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.582057] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6ceecc-a992-4b32-a683-14eec0df2fed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.602814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance 'e17fcc84-7c86-41b6-88ec-8a35619534b6' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1081.747422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "fc31da72-d09e-415e-9866-3e7fc91fec79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.747784] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.748105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "fc31da72-d09e-415e-9866-3e7fc91fec79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.748354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.748610] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.751129] env[69994]: INFO nova.compute.manager [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Terminating instance [ 1081.782846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "ab99499b-21a2-465b-9975-4e0adb18df94" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.783171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.783330] env[69994]: DEBUG nova.compute.manager [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Going to confirm migration 4 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1081.909422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.912052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.723s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.912335] env[69994]: DEBUG nova.objects.instance [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lazy-loading 'resources' on Instance uuid ef410b09-8686-409e-8391-d50cd0e0df04 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.934556] env[69994]: INFO nova.scheduler.client.report [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Deleted allocations for instance 799bf051-86b4-45bd-b9bf-df767074dac8 [ 1082.079079] env[69994]: DEBUG oslo_vmware.api [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242500, 'name': PowerOnVM_Task, 'duration_secs': 0.485527} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.079079] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1082.079079] env[69994]: INFO nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Took 6.97 seconds to spawn the instance on the hypervisor. [ 1082.079079] env[69994]: DEBUG nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1082.079079] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7120d24d-1235-4a10-bc7a-ecc817b61386 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.108655] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1082.109035] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13a36c10-4157-4ff1-854b-499ba6e03ce6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.115389] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1082.115389] env[69994]: value = "task-3242501" [ 1082.115389] env[69994]: _type = "Task" [ 1082.115389] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.123017] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242501, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.254904] env[69994]: DEBUG nova.compute.manager [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1082.254904] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1082.255819] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad268b94-5ca7-4b79-8bee-ba6d8002ffb8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.264902] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.265174] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a62bee55-2ddc-4214-89e6-a11b33fb7bd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.349558] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.349756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.349924] env[69994]: DEBUG nova.network.neutron [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.350111] env[69994]: DEBUG nova.objects.instance [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lazy-loading 'info_cache' on Instance uuid ab99499b-21a2-465b-9975-4e0adb18df94 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.351654] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.352130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.352130] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleting the datastore file [datastore1] fc31da72-d09e-415e-9866-3e7fc91fec79 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.352855] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa88512b-33c0-4543-9b05-8a2dad649c85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.360387] env[69994]: DEBUG oslo_vmware.api [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1082.360387] env[69994]: value = "task-3242503" [ 1082.360387] env[69994]: _type = "Task" [ 1082.360387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.369150] env[69994]: DEBUG oslo_vmware.api [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242503, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.443027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86dbb778-48e9-4698-8c23-b943de5dd617 tempest-ListServersNegativeTestJSON-241418158 tempest-ListServersNegativeTestJSON-241418158-project-member] Lock "799bf051-86b4-45bd-b9bf-df767074dac8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.320s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.595184] env[69994]: INFO nova.compute.manager [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Took 19.92 seconds to build instance. [ 1082.625660] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242501, 'name': PowerOffVM_Task, 'duration_secs': 0.389782} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.626678] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.626866] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance 'e17fcc84-7c86-41b6-88ec-8a35619534b6' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1082.630407] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6e69a9-2792-499f-a170-9fa2003875b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.637620] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13de275-5ed7-4700-ad0d-c8ec582035a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.667037] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80403646-dc73-478e-92fe-c6c1d3b99361 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.674208] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7a0064-87eb-4edf-978c-0e20591aa039 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.687422] env[69994]: DEBUG nova.compute.provider_tree [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.855999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.855999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.855999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.855999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.855999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.858810] env[69994]: INFO nova.compute.manager [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Terminating instance [ 1082.871738] env[69994]: DEBUG oslo_vmware.api [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180026} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.872630] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.872823] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1082.873010] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.873201] env[69994]: INFO nova.compute.manager [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1082.873461] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.873831] env[69994]: DEBUG nova.compute.manager [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1082.873932] env[69994]: DEBUG nova.network.neutron [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1083.040022] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.040022] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.097609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f9fa4f58-2586-4eae-8d43-b4ef2090cfae tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.437s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.136135] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.136367] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.136528] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.136707] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.136857] env[69994]: DEBUG nova.virt.hardware [None 
req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.139067] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.139655] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.139882] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.140113] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.140331] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.140549] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.149916] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae4b43a6-fdc7-4e93-bc0a-f9f14c1edc63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.168860] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1083.168860] env[69994]: value = "task-3242504" [ 1083.168860] env[69994]: _type = "Task" [ 1083.168860] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.179643] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242504, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.190131] env[69994]: DEBUG nova.scheduler.client.report [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.197152] env[69994]: DEBUG nova.compute.manager [req-7d91a4c3-3219-47d6-94a0-07d547cdcc37 req-d1ee5e88-bd2c-4359-81cd-f7b4394a0f7f service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Received event network-vif-deleted-49963331-a486-495f-a065-cbcd2c380941 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.197152] env[69994]: INFO nova.compute.manager [req-7d91a4c3-3219-47d6-94a0-07d547cdcc37 req-d1ee5e88-bd2c-4359-81cd-f7b4394a0f7f service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Neutron deleted interface 49963331-a486-495f-a065-cbcd2c380941; detaching it from the instance and deleting it from the info cache [ 1083.197425] env[69994]: DEBUG nova.network.neutron [req-7d91a4c3-3219-47d6-94a0-07d547cdcc37 req-d1ee5e88-bd2c-4359-81cd-f7b4394a0f7f service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.367343] env[69994]: DEBUG nova.compute.manager [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1083.367609] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1083.368486] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421a75ce-02af-4e00-99ea-82b23ff5fb34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.376584] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1083.376829] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78032e35-c814-46ac-a809-6848b949de00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.441629] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1083.441891] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1083.442106] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleting the datastore file [datastore1] 1a5b269f-5ee8-4bcc-812e-78388edb1e50 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.442365] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-763bcda5-398f-41cd-a93e-ae17834b5fb1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.455626] env[69994]: DEBUG oslo_vmware.api [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1083.455626] env[69994]: value = "task-3242506" [ 1083.455626] env[69994]: _type = "Task" [ 1083.455626] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.463852] env[69994]: DEBUG oslo_vmware.api [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242506, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.545903] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.546142] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.546323] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.546435] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.546534] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.549067] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.549067] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1083.549067] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.628111] env[69994]: DEBUG nova.network.neutron [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.679457] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242504, 'name': ReconfigVM_Task, 'duration_secs': 0.304095} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.680646] env[69994]: DEBUG nova.network.neutron [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance_info_cache with network_info: [{"id": "92378003-993a-43f2-8823-55a4b83acdef", "address": "fa:16:3e:a4:fe:80", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92378003-99", "ovs_interfaceid": "92378003-993a-43f2-8823-55a4b83acdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.682048] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance 'e17fcc84-7c86-41b6-88ec-8a35619534b6' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1083.695737] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.784s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.698562] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.955s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.700065] env[69994]: INFO nova.compute.claims [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.703353] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e06b592f-58ab-488c-8e5a-95cf3eebebc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1083.724435] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32884f53-3053-4678-b20c-e284219bc841 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.736631] env[69994]: INFO nova.scheduler.client.report [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleted allocations for instance ef410b09-8686-409e-8391-d50cd0e0df04 [ 1083.760834] env[69994]: DEBUG nova.compute.manager [req-7d91a4c3-3219-47d6-94a0-07d547cdcc37 req-d1ee5e88-bd2c-4359-81cd-f7b4394a0f7f service nova] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Detach interface failed, port_id=49963331-a486-495f-a065-cbcd2c380941, reason: Instance fc31da72-d09e-415e-9866-3e7fc91fec79 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1083.967505] env[69994]: DEBUG oslo_vmware.api [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169035} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.967760] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.967941] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1083.968136] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1083.968311] env[69994]: INFO nova.compute.manager [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1083.968602] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.968825] env[69994]: DEBUG nova.compute.manager [-] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1083.968935] env[69994]: DEBUG nova.network.neutron [-] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1084.049818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.130639] env[69994]: INFO nova.compute.manager [-] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Took 1.26 seconds to deallocate network for instance. [ 1084.187087] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-ab99499b-21a2-465b-9975-4e0adb18df94" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.189518] env[69994]: DEBUG nova.objects.instance [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lazy-loading 'migration_context' on Instance uuid ab99499b-21a2-465b-9975-4e0adb18df94 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.192048] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1084.192048] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1084.192048] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1084.192282] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor 
pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1084.192316] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1084.192520] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1084.192639] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1084.192789] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1084.193458] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1084.194934] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1084.194934] env[69994]: DEBUG nova.virt.hardware [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1084.199743] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfiguring VM instance instance-0000004f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1084.201485] env[69994]: DEBUG nova.objects.base [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1084.201779] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-405d92b7-26d8-4a48-bc21-d232b42f3012 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.215546] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fee9874-adc3-4f4b-83bd-3bde6da744b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.239873] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e84898be-8e82-4bcb-9e54-2398c99090cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.242716] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1084.242716] env[69994]: value = "task-3242507" [ 1084.242716] env[69994]: _type = "Task" [ 1084.242716] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.249017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f1d8c680-19dc-44ae-a325-3edb9e949dcb tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ef410b09-8686-409e-8391-d50cd0e0df04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.786s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.251306] env[69994]: DEBUG oslo_vmware.api [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1084.251306] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a9813b-a41c-1f73-4a07-4baa738cfb2e" [ 1084.251306] env[69994]: _type = "Task" [ 1084.251306] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.259287] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242507, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.265415] env[69994]: DEBUG oslo_vmware.api [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a9813b-a41c-1f73-4a07-4baa738cfb2e, 'name': SearchDatastore_Task, 'duration_secs': 0.007221} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.265827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.572226] env[69994]: DEBUG nova.compute.manager [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Received event network-changed-1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1084.572383] env[69994]: DEBUG nova.compute.manager [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Refreshing instance network info cache due to event network-changed-1a71caa6-eaba-4605-b4de-9df7bfa68007. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1084.572679] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] Acquiring lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.572834] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] Acquired lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.575090] env[69994]: DEBUG nova.network.neutron [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Refreshing network info cache for port 1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.637609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.759889] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242507, 'name': ReconfigVM_Task, 'duration_secs': 0.326387} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.763282] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfigured VM instance instance-0000004f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1084.764724] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae9a172-6324-4a03-91b9-ac6a1c1bc59c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.799333] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.802482] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c919d9d3-2c82-4f3a-9d0e-984a937efc08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.822181] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1084.822181] env[69994]: value = "task-3242509" [ 1084.822181] env[69994]: _type = "Task" [ 1084.822181] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.832237] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242509, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.029797] env[69994]: DEBUG nova.network.neutron [-] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.047244] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe5b5b5-ad34-417c-9e32-dbd9ec88ea0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.057539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54303b4c-4aa1-43ab-aa18-acb19109c3fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.093655] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3738ccbc-e5a8-45e4-9bfe-492e7beb7e6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.102366] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a449a80-23f2-44ce-af5e-23348e939c04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.116379] env[69994]: DEBUG nova.compute.provider_tree [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.229804] env[69994]: DEBUG nova.compute.manager [req-887313aa-224b-4c08-b9d4-5a1b41929b66 req-cdd98680-91da-4fe2-8917-78bbf05187c8 service nova] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Received event network-vif-deleted-ecc3d187-2e74-4aab-9518-f073b26b0101 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.333366] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242509, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.470555] env[69994]: DEBUG nova.network.neutron [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Updated VIF entry in instance network info cache for port 1a71caa6-eaba-4605-b4de-9df7bfa68007. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1085.470962] env[69994]: DEBUG nova.network.neutron [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Updating instance_info_cache with network_info: [{"id": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "address": "fa:16:3e:6e:82:b8", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a71caa6-ea", "ovs_interfaceid": "1a71caa6-eaba-4605-b4de-9df7bfa68007", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.534027] env[69994]: INFO nova.compute.manager [-] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Took 1.56 seconds to deallocate network for instance. [ 1085.619518] env[69994]: DEBUG nova.scheduler.client.report [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.833586] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242509, 'name': ReconfigVM_Task, 'duration_secs': 0.801349} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.833864] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfigured VM instance instance-0000004f to attach disk [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.834155] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance 'e17fcc84-7c86-41b6-88ec-8a35619534b6' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1085.979385] env[69994]: DEBUG oslo_concurrency.lockutils [req-3f0c1728-5f0b-4507-b7db-a3294dfb78c2 req-fa4b32d3-cd7f-4d0e-b5a3-4b28790742d6 service nova] Releasing lock "refresh_cache-e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.039796] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.124926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.125486] env[69994]: DEBUG nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1086.127993] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.840s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.128268] env[69994]: DEBUG nova.objects.instance [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lazy-loading 'resources' on Instance uuid 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.341514] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ea0a88-305d-4a76-8e9a-63990c541616 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.364620] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9505fa31-ec38-44ba-97f2-2783fd396240 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.385583] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance 'e17fcc84-7c86-41b6-88ec-8a35619534b6' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1086.639679] env[69994]: DEBUG nova.compute.utils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1086.641948] env[69994]: DEBUG nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Not allocating networking since 'none' was specified. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1086.854211] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44497bb-96e1-4eda-8deb-4e48ee18c1df {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.862026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7e713b-0886-4840-9a5d-cc7335c97fb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.897084] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f59ab3-914f-4d64-a748-5af5e62acd5c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.907299] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed4099b-1d64-47ec-b30f-49a5c40d9ed9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.922350] env[69994]: DEBUG nova.compute.provider_tree [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.143588] env[69994]: DEBUG nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1087.428084] env[69994]: DEBUG nova.scheduler.client.report [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.934084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.803s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.934084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.345s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.936493] env[69994]: INFO nova.compute.claims [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.957439] env[69994]: INFO nova.scheduler.client.report [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Deleted allocations for instance 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c [ 1088.050260] env[69994]: DEBUG nova.network.neutron [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Port b2521bc7-942e-4d29-bc89-0fd13a02f783 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1088.154098] env[69994]: DEBUG nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1088.180268] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1088.180532] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1088.180697] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1088.181193] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1088.181193] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1088.181193] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1088.181460] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1088.181575] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1088.181721] env[69994]: DEBUG 
nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1088.181881] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1088.182064] env[69994]: DEBUG nova.virt.hardware [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1088.182957] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867067c1-b050-409e-bd9a-cfea7ed201cc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.190943] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b08a0e-734c-42ac-9490-53803c2e073c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.205126] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.210873] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Creating folder: Project (f873305a59fd4ab3b186031f81507c19). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1088.211211] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc21652a-b187-4cc0-b60c-f5626a0a4abd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.221785] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Created folder: Project (f873305a59fd4ab3b186031f81507c19) in parent group-v647729. [ 1088.221988] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Creating folder: Instances. Parent ref: group-v648004. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1088.222233] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8dba5719-85aa-4098-a55e-76b5a6360f36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.231425] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Created folder: Instances in parent group-v648004. [ 1088.231654] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.231839] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.232088] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8f61844-af20-4413-9d4d-8555180ca6f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.251576] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.251576] env[69994]: value = "task-3242512" [ 1088.251576] env[69994]: _type = "Task" [ 1088.251576] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.259627] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242512, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.362457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.362457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.362457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.362457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.362457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.364309] env[69994]: INFO nova.compute.manager [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Terminating instance [ 1088.463747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4bd96866-6624-4bda-b758-c5a51d667a70 tempest-ServerTagsTestJSON-1728015558 tempest-ServerTagsTestJSON-1728015558-project-member] Lock "0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.791s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.762232] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242512, 'name': CreateVM_Task, 'duration_secs': 0.284631} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.762339] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.762770] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.762931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.763269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.763522] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa58d9a2-cfbe-4ec7-b831-30aef628549d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.767866] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1088.767866] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522b2521-ecfc-daa7-9e1d-c3f0ac857dea" [ 1088.767866] env[69994]: _type = "Task" [ 1088.767866] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.775296] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b2521-ecfc-daa7-9e1d-c3f0ac857dea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.867828] env[69994]: DEBUG nova.compute.manager [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1088.868093] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.869851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2706e1-bd6a-43ff-9762-521812947b16 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.877318] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1088.877571] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e059133c-005d-4b59-8317-be772140ed3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.883509] env[69994]: DEBUG oslo_vmware.api [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1088.883509] env[69994]: value = "task-3242513" [ 1088.883509] env[69994]: _type = "Task" [ 1088.883509] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.891411] env[69994]: DEBUG oslo_vmware.api [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242513, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.075921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.079729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.079729] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.222412] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65295b09-3529-4bf6-93fd-757d33618ca8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.230862] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666080f6-2500-45f8-a30a-7e3bfac493d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.261805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2cd0c0-724f-49a4-92e6-bed162c3c983 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.272446] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63266fd-7d4c-41b2-b036-2b32a010c662 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.281493] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522b2521-ecfc-daa7-9e1d-c3f0ac857dea, 'name': SearchDatastore_Task, 'duration_secs': 0.010205} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.289115] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.289367] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.289634] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.289806] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.290016] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.290500] env[69994]: DEBUG nova.compute.provider_tree [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.291936] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dc3298a-ba8b-47e0-a22f-d7deaf1fafe6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.300798] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.300798] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.301531] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3745110b-bfa5-4cde-9347-4be60451d37a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.308023] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1089.308023] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f358a5-d85c-4c7e-76df-a440929ad71b" [ 1089.308023] env[69994]: _type = "Task" [ 1089.308023] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.314173] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f358a5-d85c-4c7e-76df-a440929ad71b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.393302] env[69994]: DEBUG oslo_vmware.api [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242513, 'name': PowerOffVM_Task, 'duration_secs': 0.226279} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.393682] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.393890] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.394149] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c98c0b3c-5833-49f7-8c80-968836ea5971 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.459246] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.459485] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.459707] 
env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleting the datastore file [datastore2] b99b73e6-3348-4d5d-aa57-f01ace0bfc42 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.459971] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea197792-fe28-4510-ac7a-2abcd2795408 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.469493] env[69994]: DEBUG oslo_vmware.api [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1089.469493] env[69994]: value = "task-3242515" [ 1089.469493] env[69994]: _type = "Task" [ 1089.469493] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.478872] env[69994]: DEBUG oslo_vmware.api [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242515, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.731140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "05993c51-605c-4154-afc1-f3bc5344258c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.731360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "05993c51-605c-4154-afc1-f3bc5344258c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.795658] env[69994]: DEBUG nova.scheduler.client.report [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.818246] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f358a5-d85c-4c7e-76df-a440929ad71b, 'name': SearchDatastore_Task, 'duration_secs': 0.008108} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.819083] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb31f961-2cb0-4cc9-bebb-5c51a103e63b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.824473] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1089.824473] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521603e7-dba5-cfc1-b8ca-1b444ca27175" [ 1089.824473] env[69994]: _type = "Task" [ 1089.824473] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.833306] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521603e7-dba5-cfc1-b8ca-1b444ca27175, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.983451] env[69994]: DEBUG oslo_vmware.api [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179082} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.984138] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1089.984486] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1089.984803] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.985560] env[69994]: INFO nova.compute.manager [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1089.985755] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.986096] env[69994]: DEBUG nova.compute.manager [-] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1089.986702] env[69994]: DEBUG nova.network.neutron [-] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1090.151255] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.151460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.151638] env[69994]: DEBUG nova.network.neutron [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1090.233810] env[69994]: DEBUG nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1090.300110] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.300794] env[69994]: DEBUG nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1090.303773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.427s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.304014] env[69994]: DEBUG nova.objects.instance [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1090.330822] env[69994]: DEBUG nova.compute.manager [req-25ca6906-e849-4dbd-a0e1-6f2f38fb3c90 req-0a7d6251-1c8c-4335-ad40-323e62e3e2d6 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Received event network-vif-deleted-2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.331142] env[69994]: INFO nova.compute.manager [req-25ca6906-e849-4dbd-a0e1-6f2f38fb3c90 req-0a7d6251-1c8c-4335-ad40-323e62e3e2d6 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Neutron deleted interface 2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0; detaching it from the instance and deleting it from the info cache [ 1090.331300] env[69994]: DEBUG nova.network.neutron [req-25ca6906-e849-4dbd-a0e1-6f2f38fb3c90 req-0a7d6251-1c8c-4335-ad40-323e62e3e2d6 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.340893] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521603e7-dba5-cfc1-b8ca-1b444ca27175, 'name': SearchDatastore_Task, 'duration_secs': 0.009532} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.341668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.341942] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.342699] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52c4cd7c-2fb1-4a41-9190-580ce70c3424 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.352343] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1090.352343] env[69994]: value = "task-3242516" [ 1090.352343] env[69994]: _type = "Task" [ 1090.352343] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.360671] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242516, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.760547] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.808626] env[69994]: DEBUG nova.compute.utils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.810717] env[69994]: DEBUG nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.810882] env[69994]: DEBUG nova.network.neutron [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1090.816025] env[69994]: DEBUG nova.network.neutron [-] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.836017] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6614797-4be6-4b21-a11f-694f419dc974 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.843656] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e059a3e-5215-4ab0-9a8b-d0141c4e136f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.859131] env[69994]: DEBUG nova.policy [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '761ebe718b0f48939612e82c6b1e6766', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4c158f7555d4606b641be4264d95eaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.866737] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242516, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478207} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.877084] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.877278] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.877928] env[69994]: DEBUG nova.compute.manager [req-25ca6906-e849-4dbd-a0e1-6f2f38fb3c90 req-0a7d6251-1c8c-4335-ad40-323e62e3e2d6 service nova] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Detach interface failed, port_id=2ad3bbc3-0a2c-4d13-a9e1-cbc2bdca55c0, reason: Instance b99b73e6-3348-4d5d-aa57-f01ace0bfc42 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1090.878331] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e838ae2-56d3-4372-81fd-ee0fa95f5086 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.885025] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1090.885025] env[69994]: value = "task-3242517" [ 1090.885025] env[69994]: _type = "Task" [ 1090.885025] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.893066] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242517, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.957120] env[69994]: DEBUG nova.network.neutron [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.159441] env[69994]: DEBUG nova.network.neutron [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Successfully created port: 41e39a21-c33b-4cc5-80b2-896e3ac13b2e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.318652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f8b93159-1cfe-42b5-b9af-77a8e7ec9037 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.320416] env[69994]: DEBUG nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1091.327557] env[69994]: INFO nova.compute.manager [-] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Took 1.34 seconds to deallocate network for instance. 
[ 1091.328062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.387s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.328282] env[69994]: DEBUG nova.objects.instance [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lazy-loading 'resources' on Instance uuid f00662a9-92e0-4520-9ced-3cfd6e83628b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.398374] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066075} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.398624] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.399557] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fb4b1d-a42b-4466-9905-8d02c2741322 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.422575] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.422575] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65ccd541-ff74-475e-b22e-2edfdf6f5cf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.440603] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1091.440603] env[69994]: value = "task-3242518" [ 1091.440603] env[69994]: _type = "Task" [ 1091.440603] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.448482] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242518, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.461299] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.840038] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.953034] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242518, 'name': ReconfigVM_Task, 'duration_secs': 0.312973} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.953034] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Reconfigured VM instance instance-00000060 to attach disk [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.953922] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41ee6e1e-87ba-4e50-9092-fe3d47a4693d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.962327] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1091.962327] env[69994]: value = "task-3242519" [ 1091.962327] env[69994]: _type = "Task" [ 1091.962327] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.973848] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242519, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.976955] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06537f23-fe3a-45cd-aee9-dd7c9cc079d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.983300] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbf3b35-1d04-411f-9cfb-b3dd616f8af6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.038126] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906a2123-bd0d-46c3-b88a-31e25b8a3d8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.045252] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fee8fb5-b9e3-4710-a176-602808e5875c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.076998] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc6a1d6-622a-46c9-891c-c34b18fff8c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.084256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c3ee00-4932-4c28-9136-d7a8e748298f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.097162] env[69994]: DEBUG nova.compute.provider_tree [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.333295] env[69994]: DEBUG nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1092.358669] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.358908] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.359078] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.359266] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.359410] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.359554] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.359757] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.359914] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.360093] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 
tempest-ImagesTestJSON-1478781762-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.360302] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.360489] env[69994]: DEBUG nova.virt.hardware [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.361397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84b77e2-f68c-4b53-98fe-285cdd07c44c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.369209] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a58bc67-9967-48c9-a2d6-71a496c95e71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.472149] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242519, 'name': Rename_Task, 'duration_secs': 0.138393} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.472433] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1092.472677] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fceb75bd-81a3-4fa9-9faf-a264dc72d8c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.478826] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1092.478826] env[69994]: value = "task-3242520" [ 1092.478826] env[69994]: _type = "Task" [ 1092.478826] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.486492] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.600568] env[69994]: DEBUG nova.scheduler.client.report [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1092.680022] env[69994]: DEBUG nova.compute.manager [req-b8e30fee-7312-4f7e-8c8c-62d3e9758692 req-b48f5324-9f5b-4ec8-bcc4-bf6907185ba8 service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Received event network-vif-plugged-41e39a21-c33b-4cc5-80b2-896e3ac13b2e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.680022] env[69994]: DEBUG oslo_concurrency.lockutils [req-b8e30fee-7312-4f7e-8c8c-62d3e9758692 req-b48f5324-9f5b-4ec8-bcc4-bf6907185ba8 service nova] Acquiring lock "19fedc80-8def-426a-af73-ad871e127e02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.680234] env[69994]: DEBUG oslo_concurrency.lockutils [req-b8e30fee-7312-4f7e-8c8c-62d3e9758692 req-b48f5324-9f5b-4ec8-bcc4-bf6907185ba8 service nova] Lock "19fedc80-8def-426a-af73-ad871e127e02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.680410] env[69994]: DEBUG oslo_concurrency.lockutils [req-b8e30fee-7312-4f7e-8c8c-62d3e9758692 req-b48f5324-9f5b-4ec8-bcc4-bf6907185ba8 service nova] Lock "19fedc80-8def-426a-af73-ad871e127e02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.680614] env[69994]: DEBUG nova.compute.manager [req-b8e30fee-7312-4f7e-8c8c-62d3e9758692 req-b48f5324-9f5b-4ec8-bcc4-bf6907185ba8 service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] No waiting events found dispatching network-vif-plugged-41e39a21-c33b-4cc5-80b2-896e3ac13b2e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1092.680789] env[69994]: WARNING nova.compute.manager [req-b8e30fee-7312-4f7e-8c8c-62d3e9758692 req-b48f5324-9f5b-4ec8-bcc4-bf6907185ba8 service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Received unexpected event network-vif-plugged-41e39a21-c33b-4cc5-80b2-896e3ac13b2e for instance with vm_state building and task_state spawning. [ 1092.988850] env[69994]: DEBUG oslo_vmware.api [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242520, 'name': PowerOnVM_Task, 'duration_secs': 0.397579} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.989146] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1092.989319] env[69994]: INFO nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Took 4.84 seconds to spawn the instance on the hypervisor. [ 1092.989498] env[69994]: DEBUG nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1092.990250] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d5a380-21d4-4e9b-97c8-71704761347d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.082870] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c9e0bc-f933-4b52-8995-2dd5cd140f53 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.104211] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aae66bd-aa8d-4708-9a12-3a85ec264f1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.107313] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.779s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.109336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.060s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.109515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.109670] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1093.110050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.844s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.112026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0f358a-d08d-4346-943b-747de715114a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.117570] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance 'e17fcc84-7c86-41b6-88ec-8a35619534b6' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1093.127168] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-badf6cb4-bb17-4c3b-8da0-ede6e2107a29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.131809] env[69994]: INFO nova.scheduler.client.report [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted allocations for instance f00662a9-92e0-4520-9ced-3cfd6e83628b [ 1093.147029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec88219-ecd0-42a5-a4eb-521151bde25e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.151158] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05272e18-fd2d-4a07-8bb9-4d09798f30c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.185015] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179190MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1093.185143] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.243765] env[69994]: DEBUG nova.network.neutron [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Successfully updated port: 41e39a21-c33b-4cc5-80b2-896e3ac13b2e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.263191] env[69994]: DEBUG nova.compute.manager [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Received event network-changed-41e39a21-c33b-4cc5-80b2-896e3ac13b2e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.263393] env[69994]: DEBUG 
nova.compute.manager [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Refreshing instance network info cache due to event network-changed-41e39a21-c33b-4cc5-80b2-896e3ac13b2e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1093.263603] env[69994]: DEBUG oslo_concurrency.lockutils [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] Acquiring lock "refresh_cache-19fedc80-8def-426a-af73-ad871e127e02" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.263745] env[69994]: DEBUG oslo_concurrency.lockutils [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] Acquired lock "refresh_cache-19fedc80-8def-426a-af73-ad871e127e02" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.263902] env[69994]: DEBUG nova.network.neutron [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Refreshing network info cache for port 41e39a21-c33b-4cc5-80b2-896e3ac13b2e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1093.509119] env[69994]: INFO nova.compute.manager [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Took 17.78 seconds to build instance. [ 1093.623358] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.623671] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a77d457-b4b7-4b24-bb24-20eccb2b7d03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.630984] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1093.630984] env[69994]: value = "task-3242521" [ 1093.630984] env[69994]: _type = "Task" [ 1093.630984] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.640908] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242521, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.646763] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3a497d92-fea4-462c-be0d-0ae703185b1a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "f00662a9-92e0-4520-9ced-3cfd6e83628b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.621s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.746769] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "refresh_cache-19fedc80-8def-426a-af73-ad871e127e02" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.797438] env[69994]: DEBUG nova.network.neutron [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1093.802453] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978d09f1-6571-4ce3-82fb-9eaf5fe86beb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.809960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712c802c-4fe6-41aa-b72b-d0319cc5e3ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.845864] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb813b25-3de4-42ce-b02b-aa629ee0f5b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.854311] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262069c8-8213-425b-87b1-18e82cb03ded {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.868518] env[69994]: DEBUG nova.compute.provider_tree [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.880130] env[69994]: DEBUG nova.network.neutron [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.011976] env[69994]: DEBUG oslo_concurrency.lockutils [None req-409f9248-a42c-4eb4-8abf-6beb5b7b97f9 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.301s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1094.142019] env[69994]: DEBUG oslo_vmware.api [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242521, 'name': PowerOnVM_Task, 'duration_secs': 0.380626} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.142358] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.142559] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7f8fa62b-c662-4b57-88a3-f6228d476006 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance 'e17fcc84-7c86-41b6-88ec-8a35619534b6' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1094.371469] env[69994]: DEBUG nova.scheduler.client.report [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.382757] env[69994]: DEBUG oslo_concurrency.lockutils [req-ff390bd1-d1e0-4f8b-bb05-90148f1b7ffa req-5dcaa4c0-a254-41f8-b33d-6e7cb443eeeb service nova] Releasing lock "refresh_cache-19fedc80-8def-426a-af73-ad871e127e02" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.383158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "refresh_cache-19fedc80-8def-426a-af73-ad871e127e02" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.383332] env[69994]: DEBUG nova.network.neutron [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1094.915219] env[69994]: DEBUG nova.network.neutron [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1094.919685] env[69994]: INFO nova.compute.manager [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Rebuilding instance [ 1094.966056] env[69994]: DEBUG nova.compute.manager [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.967295] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5001f00e-f982-4ff0-ba65-32b5386a8548 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.058792] env[69994]: DEBUG nova.network.neutron [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Updating instance_info_cache with network_info: [{"id": "41e39a21-c33b-4cc5-80b2-896e3ac13b2e", "address": "fa:16:3e:2f:74:7f", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41e39a21-c3", "ovs_interfaceid": "41e39a21-c33b-4cc5-80b2-896e3ac13b2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.381797] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.272s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.384691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.747s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.384920] env[69994]: DEBUG nova.objects.instance [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 
tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'resources' on Instance uuid fc31da72-d09e-415e-9866-3e7fc91fec79 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.561897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "refresh_cache-19fedc80-8def-426a-af73-ad871e127e02" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.562319] env[69994]: DEBUG nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Instance network_info: |[{"id": "41e39a21-c33b-4cc5-80b2-896e3ac13b2e", "address": "fa:16:3e:2f:74:7f", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41e39a21-c3", "ovs_interfaceid": "41e39a21-c33b-4cc5-80b2-896e3ac13b2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1095.562788] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:74:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41e39a21-c33b-4cc5-80b2-896e3ac13b2e', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.570450] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1095.570668] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1095.571382] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54044273-3c46-4e76-a998-68bdef16ee8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.592558] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.592558] env[69994]: value = "task-3242522" [ 1095.592558] env[69994]: _type = "Task" [ 1095.592558] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.600644] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242522, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.945491] env[69994]: INFO nova.scheduler.client.report [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted allocation for migration fdd10aa8-31c5-4ca6-937d-23c4b6d5f3f4 [ 1095.982256] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.983184] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b476f86f-9bb4-46e8-9d1e-28317e97ac4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.990795] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1095.990795] env[69994]: value = "task-3242523" [ 1095.990795] env[69994]: _type = "Task" [ 1095.990795] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.001624] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242523, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.074529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2720b61-0ca5-4de0-9ba4-95429db5ab63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.083280] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27748129-3187-4e66-9d8d-97e91618ae3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.118753] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcb4058-2b85-4c58-ac29-79ab879c6345 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.130276] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242522, 'name': CreateVM_Task, 'duration_secs': 0.310338} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.131770] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1096.132505] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.132703] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.133047] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1096.134285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c5cf2f-8c64-4de5-a3ba-d2016698f08f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.138474] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df10ff4d-d6c1-4aca-8dd2-4a678d976e79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.150275] env[69994]: DEBUG nova.compute.provider_tree [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.152495] env[69994]: DEBUG oslo_vmware.api 
[None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1096.152495] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528db0f2-00b6-c89a-e65f-5d2aeb29c017" [ 1096.152495] env[69994]: _type = "Task" [ 1096.152495] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.160549] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528db0f2-00b6-c89a-e65f-5d2aeb29c017, 'name': SearchDatastore_Task, 'duration_secs': 0.009109} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.161284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.161510] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.161741] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.161885] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.162096] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.162550] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-733744f4-e390-4d01-b1fa-32b01963e1c8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.169448] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.169629] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.170520] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051371e7-3007-4737-afc5-256458a98c3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.175379] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1096.175379] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52aedb21-5273-7de9-ec20-cccfa4bc6f88" [ 1096.175379] env[69994]: _type = "Task" [ 1096.175379] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.183607] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52aedb21-5273-7de9-ec20-cccfa4bc6f88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.451423] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aa0b7cab-5cd5-4373-89c6-a0b972244185 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 14.668s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.500195] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242523, 'name': PowerOffVM_Task, 'duration_secs': 0.146857} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.500465] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.501145] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.501907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4836ec87-5435-4090-b45f-122145e6d3eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.508677] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.508932] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3703943d-3d1f-4768-a626-cc8857b3d96b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.532775] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.533165] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.533442] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Deleting the datastore file [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.533779] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7906e985-69f4-4077-86e9-ed7dedceb6b4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.542062] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1096.542062] env[69994]: value = "task-3242525" [ 1096.542062] env[69994]: _type = "Task" [ 1096.542062] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.552977] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.655173] env[69994]: DEBUG nova.scheduler.client.report [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.685603] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52aedb21-5273-7de9-ec20-cccfa4bc6f88, 'name': SearchDatastore_Task, 'duration_secs': 0.00811} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.688454] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85ce4b88-b33f-40c7-86ff-6d0fa6d1574d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.692758] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1096.692758] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52821b2d-389e-5a0b-f089-b243eb4aa6ff" [ 1096.692758] env[69994]: _type = "Task" [ 1096.692758] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.702461] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52821b2d-389e-5a0b-f089-b243eb4aa6ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.737168] env[69994]: DEBUG nova.network.neutron [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Port b2521bc7-942e-4d29-bc89-0fd13a02f783 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1096.737445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.737597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.737755] env[69994]: DEBUG nova.network.neutron [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.051879] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104182} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.052153] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.052341] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.052522] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.160494] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.162970] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.123s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.163168] env[69994]: DEBUG nova.objects.instance [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'resources' on Instance uuid 1a5b269f-5ee8-4bcc-812e-78388edb1e50 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.181484] env[69994]: INFO nova.scheduler.client.report [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted allocations for instance fc31da72-d09e-415e-9866-3e7fc91fec79 [ 1097.206566] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52821b2d-389e-5a0b-f089-b243eb4aa6ff, 'name': SearchDatastore_Task, 'duration_secs': 0.00984} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.206838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.207573] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 19fedc80-8def-426a-af73-ad871e127e02/19fedc80-8def-426a-af73-ad871e127e02.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1097.207573] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae02d961-8bbe-4fc0-80b7-07c50fd6b476 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.215637] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1097.215637] env[69994]: value = "task-3242526" [ 1097.215637] env[69994]: _type = "Task" [ 1097.215637] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.224950] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242526, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.337244] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "ab99499b-21a2-465b-9975-4e0adb18df94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.337528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.337773] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.337909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.338163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.340760] env[69994]: INFO nova.compute.manager [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Terminating instance [ 1097.522257] env[69994]: DEBUG nova.network.neutron [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.691393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7229e759-3c35-4ed6-8c43-dec9f64a4d33 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "fc31da72-d09e-415e-9866-3e7fc91fec79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.943s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.725870] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471838} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.728439] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 19fedc80-8def-426a-af73-ad871e127e02/19fedc80-8def-426a-af73-ad871e127e02.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.728686] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1097.729116] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be63e793-86f3-4ebf-bfcd-8d75e15f2985 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.735539] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1097.735539] env[69994]: value = "task-3242527" [ 1097.735539] env[69994]: _type = "Task" [ 1097.735539] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.746167] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242527, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.821366] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "d8d2958c-e44c-4796-becc-c572057f7ba5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.821598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.844701] env[69994]: DEBUG nova.compute.manager [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.844909] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.845757] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff9dad0-67ad-4e2a-b44d-b9e93fe13658 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.852961] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.853857] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87315947-1554-4ace-aaa1-0714e42038b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.855854] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30e6924-837e-4373-b3ba-9f1a05b083d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.863539] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3274ee29-54a2-430a-8c72-e2ab5ef1eb40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.866225] env[69994]: DEBUG oslo_vmware.api [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1097.866225] env[69994]: value = "task-3242528" [ 1097.866225] 
env[69994]: _type = "Task" [ 1097.866225] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.897080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fca8731-3b9b-445f-8d2a-46e4996bf099 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.902736] env[69994]: DEBUG oslo_vmware.api [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.907210] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dbf429-2cb5-4ab4-9e33-de6e3f80a660 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.920129] env[69994]: DEBUG nova.compute.provider_tree [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.025267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.092278] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.092278] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.092278] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1098.092278] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.092278] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.092684] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.092684] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.092768] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.092942] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.093177] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.093385] env[69994]: DEBUG nova.virt.hardware [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.094575] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca193bc-82a5-4e35-8d23-912da0467480 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.103844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18f67de-94c3-4d2d-91cf-499d5be19990 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.119067] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] 
Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.124710] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.124943] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.125182] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3941290-4b2c-49b9-a2ca-bffbf16ec9ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.141331] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.141331] env[69994]: value = "task-3242529" [ 1098.141331] env[69994]: _type = "Task" [ 1098.141331] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.151923] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242529, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.245168] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242527, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074261} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.245438] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1098.246216] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb4f992-8cd5-48f8-b622-49108d2b8a9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.268017] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 19fedc80-8def-426a-af73-ad871e127e02/19fedc80-8def-426a-af73-ad871e127e02.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.268313] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62de77f5-fbfa-47ef-ac0a-d52c70fa5e26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.287803] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1098.287803] env[69994]: value = "task-3242530" [ 1098.287803] env[69994]: _type = "Task" [ 1098.287803] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.296619] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242530, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.323930] env[69994]: DEBUG nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1098.377172] env[69994]: DEBUG oslo_vmware.api [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242528, 'name': PowerOffVM_Task, 'duration_secs': 0.206702} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.377441] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1098.377608] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1098.377851] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d284ee31-6ff9-43b1-99ea-67ae295e79fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.423175] env[69994]: DEBUG nova.scheduler.client.report [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.529067] env[69994]: DEBUG nova.compute.manager [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69994) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1098.651927] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242529, 'name': CreateVM_Task, 'duration_secs': 0.303995} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.652137] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1098.652560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.652717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.653047] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1098.653291] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a055e1a0-caf4-42ad-968f-02d964d1e03a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.657396] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1098.657396] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5232d7d4-dee6-bba8-be22-eb00bef06a2c" [ 1098.657396] env[69994]: _type = "Task" [ 1098.657396] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.664670] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5232d7d4-dee6-bba8-be22-eb00bef06a2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.797483] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242530, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.849991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.927897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.930257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.170s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.932013] env[69994]: INFO nova.compute.claims [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1098.946375] env[69994]: INFO nova.scheduler.client.report [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted allocations for instance 1a5b269f-5ee8-4bcc-812e-78388edb1e50 [ 1099.169959] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5232d7d4-dee6-bba8-be22-eb00bef06a2c, 'name': SearchDatastore_Task, 'duration_secs': 0.009851} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.170297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.170541] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.170818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.170913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.171142] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.171416] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87fab3a4-2bb2-4645-870f-e6b1cdf4dbb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.180123] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.180311] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.181060] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a1a72de-9acc-4542-8585-e42f3d850528 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.873728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.876522] env[69994]: DEBUG oslo_concurrency.lockutils [None req-202ece9c-e47b-4fca-9d83-9e1e4bdbbb8c tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "1a5b269f-5ee8-4bcc-812e-78388edb1e50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.022s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.880940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.880940] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.885332] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1099.885332] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52282c06-9acc-d333-c31d-412ad2dc8e01" [ 1099.885332] env[69994]: _type = "Task" [ 1099.885332] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.889607] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242530, 'name': ReconfigVM_Task, 'duration_secs': 0.85173} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.892721] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 19fedc80-8def-426a-af73-ad871e127e02/19fedc80-8def-426a-af73-ad871e127e02.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.893489] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0e6a1fa-8b02-4bea-bff9-402ac99e0c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.900764] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52282c06-9acc-d333-c31d-412ad2dc8e01, 'name': SearchDatastore_Task, 'duration_secs': 0.010249} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.902282] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1099.902282] env[69994]: value = "task-3242532" [ 1099.902282] env[69994]: _type = "Task" [ 1099.902282] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.902460] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e1de10-1023-43b4-a921-784988be87a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.913298] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242532, 'name': Rename_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.913624] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1099.913624] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524ddc08-af7e-84b6-d725-a603b1388254" [ 1099.913624] env[69994]: _type = "Task" [ 1099.913624] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.921491] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524ddc08-af7e-84b6-d725-a603b1388254, 'name': SearchDatastore_Task, 'duration_secs': 0.008836} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.921723] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.921983] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1099.922288] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c8bc3d4-3a16-471c-bba2-cddaf0802af0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.927857] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1099.927857] env[69994]: value = "task-3242533" [ 1099.927857] env[69994]: _type = "Task" [ 1099.927857] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.935344] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242533, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.158895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "ca237467-eafc-4c18-a56e-98b94d111c92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.159241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.383496] env[69994]: DEBUG nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1100.414452] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242532, 'name': Rename_Task, 'duration_secs': 0.128257} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.417463] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1100.418313] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f167433-9b68-48ae-b528-224534709bed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.423882] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1100.423882] env[69994]: value = "task-3242534" [ 1100.423882] env[69994]: _type = "Task" [ 1100.423882] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.435475] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242534, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.442874] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446065} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.445282] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1100.445498] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1100.446075] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa946535-ffb8-4042-9ffc-aa6299a7cd1a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.451623] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1100.451623] env[69994]: value = "task-3242535" [ 1100.451623] env[69994]: _type = "Task" [ 1100.451623] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.463573] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242535, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.562284] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fc6609-724e-4bc2-98ca-da3379c8be05 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.569583] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1415540-64c2-4e53-8fbb-b9896fa6684d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.598262] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f36ab3a-f6dc-4a1c-b14d-fed5b2978231 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.605211] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5305c9-4725-41e4-881e-116fca2fe514 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.617924] env[69994]: DEBUG nova.compute.provider_tree [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.661539] env[69994]: DEBUG nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1100.903206] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.933320] env[69994]: DEBUG oslo_vmware.api [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242534, 'name': PowerOnVM_Task, 'duration_secs': 0.437842} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.933601] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1100.933895] env[69994]: INFO nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Took 8.60 seconds to spawn the instance on the hypervisor. 
[ 1100.934091] env[69994]: DEBUG nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1100.934830] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033fc7e8-2820-4c8b-a2c1-4f191a428ec0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.962117] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075564} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.962406] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1100.963263] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051da664-aea4-453f-a625-fe7ea869b0af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.983042] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.983978] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a3b2f74-fd44-4643-9915-3deed3192de7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.003304] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1101.003304] env[69994]: value = "task-3242536" [ 1101.003304] env[69994]: _type = "Task" [ 1101.003304] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.011762] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242536, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.121655] env[69994]: DEBUG nova.scheduler.client.report [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.183817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.405277] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1101.405574] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1101.405771] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleting the datastore file [datastore1] ab99499b-21a2-465b-9975-4e0adb18df94 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.406061] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85d5e905-6185-4912-ab16-90f70d0a7f8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.413199] env[69994]: DEBUG oslo_vmware.api [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1101.413199] env[69994]: value = "task-3242537" [ 1101.413199] env[69994]: _type = "Task" [ 1101.413199] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.421347] env[69994]: DEBUG oslo_vmware.api [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242537, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.452959] env[69994]: INFO nova.compute.manager [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Took 21.88 seconds to build instance. [ 1101.513709] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.626801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.627980] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.788s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.628237] env[69994]: DEBUG nova.objects.instance [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lazy-loading 'resources' on Instance uuid b99b73e6-3348-4d5d-aa57-f01ace0bfc42 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.923418] env[69994]: DEBUG oslo_vmware.api [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242537, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.452587} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.923790] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1101.923895] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1101.924131] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1101.924326] env[69994]: INFO nova.compute.manager [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Took 4.08 seconds to destroy the instance on the hypervisor. [ 1101.924628] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1101.924845] env[69994]: DEBUG nova.compute.manager [-] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1101.924950] env[69994]: DEBUG nova.network.neutron [-] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1101.954771] env[69994]: DEBUG oslo_concurrency.lockutils [None req-841d87c9-e17d-4d8a-9792-d715104a23c5 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "19fedc80-8def-426a-af73-ad871e127e02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.393s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.013785] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242536, 'name': ReconfigVM_Task, 'duration_secs': 0.710177} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.013964] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Reconfigured VM instance instance-00000060 to attach disk [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d/dd5ebc73-5866-4a5b-9d4f-aac721b0da8d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.014491] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1b0f8f7-4390-403f-aae3-cf752c78bebf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.021726] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1102.021726] env[69994]: value = "task-3242538" [ 1102.021726] env[69994]: _type = "Task" [ 1102.021726] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.029459] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242538, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.130986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "f8218754-3483-4fb0-824e-62904984d399" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.130986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "f8218754-3483-4fb0-824e-62904984d399" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.190878] env[69994]: DEBUG nova.compute.manager [req-837e4ac9-2119-4607-99bf-0e3979b4a5c0 req-928c14b8-8e58-4523-9aaa-80d779bdb66f service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Received event network-vif-deleted-92378003-993a-43f2-8823-55a4b83acdef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.191179] env[69994]: INFO nova.compute.manager [req-837e4ac9-2119-4607-99bf-0e3979b4a5c0 req-928c14b8-8e58-4523-9aaa-80d779bdb66f service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Neutron deleted interface 92378003-993a-43f2-8823-55a4b83acdef; detaching it from the instance and deleting it from the info cache [ 1102.191379] env[69994]: DEBUG nova.network.neutron [req-837e4ac9-2119-4607-99bf-0e3979b4a5c0 req-928c14b8-8e58-4523-9aaa-80d779bdb66f service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance_info_cache with 
network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.299704] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b07f49-1e49-41fc-a49e-1e21d8d12b63 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.306960] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d7aabb-c4e2-4838-808b-4a276c22d247 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.336437] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1120da0-b523-410b-85b7-60d7b1be53aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.343913] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1566b517-3b14-40a7-962f-aec1af837803 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.357371] env[69994]: DEBUG nova.compute.provider_tree [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.535114] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242538, 'name': Rename_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.633187] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "f8218754-3483-4fb0-824e-62904984d399" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.502s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.633771] env[69994]: DEBUG nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1102.674975] env[69994]: DEBUG nova.network.neutron [-] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.693445] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-268cfa91-ff1c-48cd-b6ba-7a37e77641ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.702963] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244a40d9-e608-4ed8-929b-cda0482d6e30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.730334] env[69994]: DEBUG nova.compute.manager [req-837e4ac9-2119-4607-99bf-0e3979b4a5c0 req-928c14b8-8e58-4523-9aaa-80d779bdb66f service nova] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Detach interface failed, port_id=92378003-993a-43f2-8823-55a4b83acdef, reason: Instance ab99499b-21a2-465b-9975-4e0adb18df94 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1102.793262] env[69994]: DEBUG nova.compute.manager [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1102.794151] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f81bc9-9258-48af-95a5-30a12be14276 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.860672] env[69994]: DEBUG nova.scheduler.client.report [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.032839] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242538, 'name': Rename_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.139047] env[69994]: DEBUG nova.compute.utils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1103.140361] env[69994]: DEBUG nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1103.140552] env[69994]: DEBUG nova.network.neutron [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1103.177553] env[69994]: INFO nova.compute.manager [-] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Took 1.25 seconds to deallocate network for instance. [ 1103.200615] env[69994]: DEBUG nova.policy [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a58ec814764141448ab44ffdc785afe9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25ae5db852d74e62aa2a72e59cfa3383', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1103.304789] env[69994]: INFO nova.compute.manager [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] instance snapshotting [ 1103.307757] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01072b51-1c8a-47e3-9908-d3123a02fb70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.327891] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e721af6-af88-4847-82e0-a39ab1953a7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.365278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.737s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.367493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 10.182s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.391572] env[69994]: INFO nova.scheduler.client.report [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleted allocations for instance b99b73e6-3348-4d5d-aa57-f01ace0bfc42 [ 1103.507379] env[69994]: DEBUG nova.network.neutron [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Successfully created port: ae87b679-9aae-4b69-9339-d14c469c779b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1103.539912] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242538, 'name': Rename_Task, 'duration_secs': 1.12411} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.540455] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.540888] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a36a9c94-3712-462d-bef1-3b221556dcd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.549723] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1103.549723] env[69994]: value = "task-3242539" [ 1103.549723] env[69994]: _type = "Task" [ 1103.549723] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.562176] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242539, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.644700] env[69994]: DEBUG nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1103.686869] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.839219] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1103.839557] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9afcc067-3f09-4e0f-a7a5-0ad15a6df70e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.847210] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1103.847210] env[69994]: value = "task-3242540" [ 1103.847210] env[69994]: _type = "Task" [ 1103.847210] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.855554] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242540, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.898850] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5c641e90-cf6a-4636-b85c-02ce8fd9114a tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "b99b73e6-3348-4d5d-aa57-f01ace0bfc42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.537s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.059788] env[69994]: DEBUG oslo_vmware.api [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242539, 'name': PowerOnVM_Task, 'duration_secs': 0.415731} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.060099] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1104.060291] env[69994]: DEBUG nova.compute.manager [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1104.061112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2dfde8-b37b-4abd-94af-4a089cf957e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.116426] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.116709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.116918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.117113] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.117286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.119610] env[69994]: INFO nova.compute.manager [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 
tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Terminating instance [ 1104.357069] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242540, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.378324] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Applying migration context for instance e17fcc84-7c86-41b6-88ec-8a35619534b6 as it has an incoming, in-progress migration 333534b2-bb77-4949-8bf8-98da2d12ec07. Migration status is reverting {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1104.379616] env[69994]: INFO nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating resource usage from migration 333534b2-bb77-4949-8bf8-98da2d12ec07 [ 1104.399921] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ed662f67-be0e-4f19-bb8a-6af39b4d348c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.400080] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eff21ec5-a51d-4004-9edf-1891f706fe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.400234] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.400392] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ab99499b-21a2-465b-9975-4e0adb18df94 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1104.400510] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e6acdc45-5e8f-4ff0-9259-3de73a6fdd14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.400626] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Migration 333534b2-bb77-4949-8bf8-98da2d12ec07 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1104.400739] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e17fcc84-7c86-41b6-88ec-8a35619534b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.400850] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance dd5ebc73-5866-4a5b-9d4f-aac721b0da8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.400959] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 19fedc80-8def-426a-af73-ad871e127e02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.401134] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 05993c51-605c-4154-afc1-f3bc5344258c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.577515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.624618] env[69994]: DEBUG nova.compute.manager [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1104.624858] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.625814] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5535dc50-fe1a-466c-a318-d37b668311c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.634712] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.634956] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-468b963c-40b4-4cef-b982-567f8aa13f3e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.641265] env[69994]: DEBUG oslo_vmware.api [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1104.641265] env[69994]: value = "task-3242541" [ 1104.641265] env[69994]: _type = "Task" [ 1104.641265] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.648806] env[69994]: DEBUG oslo_vmware.api [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.653984] env[69994]: DEBUG nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1104.678142] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1104.678413] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.678581] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.678776] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.678997] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.679184] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1104.679394] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1104.679552] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1104.679717] env[69994]: DEBUG nova.virt.hardware [None 
req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1104.679879] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1104.680059] env[69994]: DEBUG nova.virt.hardware [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1104.680903] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626ef37c-d930-46e1-b635-855b517fa3ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.688689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0393131b-c801-4d52-997a-314672883532 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.822882] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.823189] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.823409] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.823624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.823805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock 
"dd5ebc73-5866-4a5b-9d4f-aac721b0da8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.826037] env[69994]: INFO nova.compute.manager [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Terminating instance [ 1104.857836] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242540, 'name': CreateSnapshot_Task, 'duration_secs': 0.986646} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.858269] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1104.859074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6b564e-f144-4e0c-948a-896fc52fdf71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.905132] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance d8d2958c-e44c-4796-becc-c572057f7ba5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1105.035928] env[69994]: DEBUG nova.compute.manager [req-fef558ec-b5ac-49ad-8369-04dcfd63238c req-e5d6d2a1-5d61-4ac0-8ed8-f8e952714efd service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Received event network-vif-plugged-ae87b679-9aae-4b69-9339-d14c469c779b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1105.036161] env[69994]: DEBUG oslo_concurrency.lockutils [req-fef558ec-b5ac-49ad-8369-04dcfd63238c req-e5d6d2a1-5d61-4ac0-8ed8-f8e952714efd service nova] Acquiring lock "05993c51-605c-4154-afc1-f3bc5344258c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.036368] env[69994]: DEBUG oslo_concurrency.lockutils [req-fef558ec-b5ac-49ad-8369-04dcfd63238c req-e5d6d2a1-5d61-4ac0-8ed8-f8e952714efd service nova] Lock "05993c51-605c-4154-afc1-f3bc5344258c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.036606] env[69994]: DEBUG oslo_concurrency.lockutils [req-fef558ec-b5ac-49ad-8369-04dcfd63238c req-e5d6d2a1-5d61-4ac0-8ed8-f8e952714efd service nova] Lock "05993c51-605c-4154-afc1-f3bc5344258c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.036799] env[69994]: DEBUG nova.compute.manager [req-fef558ec-b5ac-49ad-8369-04dcfd63238c req-e5d6d2a1-5d61-4ac0-8ed8-f8e952714efd service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] No waiting events found dispatching network-vif-plugged-ae87b679-9aae-4b69-9339-d14c469c779b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1105.037009] env[69994]: WARNING nova.compute.manager [req-fef558ec-b5ac-49ad-8369-04dcfd63238c req-e5d6d2a1-5d61-4ac0-8ed8-f8e952714efd service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Received unexpected event network-vif-plugged-ae87b679-9aae-4b69-9339-d14c469c779b for instance with vm_state building and task_state spawning. [ 1105.151078] env[69994]: DEBUG oslo_vmware.api [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242541, 'name': PowerOffVM_Task, 'duration_secs': 0.264623} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.151460] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.151536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.151793] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f00def3-0f82-46a5-8064-ebd6e4079031 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.215694] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.215908] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.216111] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleting the datastore file [datastore2] ed662f67-be0e-4f19-bb8a-6af39b4d348c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.216375] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bcfea77-2f95-4b44-9531-07c6f5552538 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.223096] env[69994]: DEBUG oslo_vmware.api [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for the task: (returnval){ [ 1105.223096] env[69994]: value = "task-3242543" [ 1105.223096] env[69994]: _type = "Task" [ 1105.223096] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.230470] env[69994]: DEBUG oslo_vmware.api [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242543, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.329971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "refresh_cache-dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.330177] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquired lock "refresh_cache-dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.330356] env[69994]: DEBUG nova.network.neutron [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1105.376816] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1105.377380] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2a873d88-d6c6-41b4-b688-a6ccee295fb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.387713] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1105.387713] env[69994]: value = "task-3242544" [ 1105.387713] env[69994]: _type = "Task" [ 1105.387713] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.395207] env[69994]: DEBUG nova.network.neutron [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Successfully updated port: ae87b679-9aae-4b69-9339-d14c469c779b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1105.402099] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242544, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.407872] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance be421d40-9859-4e0d-aef8-a2feb8717a78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1105.733502] env[69994]: DEBUG oslo_vmware.api [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Task: {'id': task-3242543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217529} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.733758] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.733942] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.734132] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.734326] env[69994]: INFO nova.compute.manager [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1105.734546] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.734728] env[69994]: DEBUG nova.compute.manager [-] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.734819] env[69994]: DEBUG nova.network.neutron [-] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.851443] env[69994]: DEBUG nova.network.neutron [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1105.902598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "refresh_cache-05993c51-605c-4154-afc1-f3bc5344258c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.903169] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquired lock "refresh_cache-05993c51-605c-4154-afc1-f3bc5344258c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.903169] env[69994]: DEBUG nova.network.neutron [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1105.904619] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242544, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.913815] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ca237467-eafc-4c18-a56e-98b94d111c92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1105.913815] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1105.913815] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1105.916875] env[69994]: DEBUG nova.network.neutron [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.116626] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceea5fc8-0ddb-4dea-93ee-5d424e6f15b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.125036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bb71b5-0a3a-4ba5-b246-1164bfa415fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.156886] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb736fa8-4492-47d4-b5c5-410d6734c52a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.164871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377479c3-4bf7-433a-8d78-a5806cd16176 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.179174] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.401398] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242544, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.419908] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Releasing lock "refresh_cache-dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.420654] env[69994]: DEBUG nova.compute.manager [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1106.421020] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1106.422423] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567c4f78-ce84-4ee5-a12c-3e5d4394b58f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.430527] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1106.431778] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa1af37b-5327-4a96-af92-b2b9c3548822 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.438305] env[69994]: DEBUG oslo_vmware.api [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1106.438305] env[69994]: value = "task-3242545" [ 1106.438305] env[69994]: _type = "Task" [ 1106.438305] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.442578] env[69994]: DEBUG nova.network.neutron [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1106.451389] env[69994]: DEBUG oslo_vmware.api [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242545, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.628639] env[69994]: DEBUG nova.network.neutron [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Updating instance_info_cache with network_info: [{"id": "ae87b679-9aae-4b69-9339-d14c469c779b", "address": "fa:16:3e:ba:95:a9", "network": {"id": "6b3cbf7e-5b76-4899-a67c-5cccdeb5d269", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1170571372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25ae5db852d74e62aa2a72e59cfa3383", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae87b679-9a", "ovs_interfaceid": "ae87b679-9aae-4b69-9339-d14c469c779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.660257] env[69994]: DEBUG nova.network.neutron [-] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.682765] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.900919] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242544, 'name': CloneVM_Task, 'duration_secs': 1.167461} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.901230] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Created linked-clone VM from snapshot [ 1106.901958] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605b0c9c-3cec-4dcb-be07-31335aa1e93f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.909427] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Uploading image 1ab8f161-e68e-437c-a5bf-624846f8bc40 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1106.934506] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1106.934506] env[69994]: value = "vm-648010" [ 1106.934506] env[69994]: _type = "VirtualMachine" [ 1106.934506] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1106.934795] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-78967168-392b-4373-b0da-3dc95bdac600 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.943220] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease: (returnval){ [ 1106.943220] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52be5351-90ae-e0e1-d403-0ccb30dfdcad" [ 1106.943220] env[69994]: _type = "HttpNfcLease" [ 1106.943220] env[69994]: } obtained for exporting VM: (result){ [ 1106.943220] env[69994]: value = "vm-648010" [ 1106.943220] env[69994]: _type = "VirtualMachine" [ 1106.943220] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1106.943543] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the lease: (returnval){ [ 1106.943543] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52be5351-90ae-e0e1-d403-0ccb30dfdcad" [ 1106.943543] env[69994]: _type = "HttpNfcLease" [ 1106.943543] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1106.950136] env[69994]: DEBUG oslo_vmware.api [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242545, 'name': PowerOffVM_Task, 'duration_secs': 0.203796} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.950721] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.950893] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1106.951165] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70e46ec6-5df7-4d00-abea-3c2265a4dfec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.953948] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1106.953948] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52be5351-90ae-e0e1-d403-0ccb30dfdcad" [ 1106.953948] env[69994]: _type = "HttpNfcLease" [ 1106.953948] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1106.974802] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1106.975017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1106.975204] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Deleting the datastore file [datastore2] dd5ebc73-5866-4a5b-9d4f-aac721b0da8d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1106.975465] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3c1f0ee-6ff9-4589-b460-c99536a1e69b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.981851] env[69994]: DEBUG oslo_vmware.api [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for the task: (returnval){ [ 1106.981851] env[69994]: value = "task-3242548" [ 1106.981851] env[69994]: _type = "Task" [ 1106.981851] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.990078] env[69994]: DEBUG oslo_vmware.api [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.076459] env[69994]: DEBUG nova.compute.manager [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Received event network-changed-ae87b679-9aae-4b69-9339-d14c469c779b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.076546] env[69994]: DEBUG nova.compute.manager [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Refreshing instance network info cache due to event network-changed-ae87b679-9aae-4b69-9339-d14c469c779b. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1107.076827] env[69994]: DEBUG oslo_concurrency.lockutils [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] Acquiring lock "refresh_cache-05993c51-605c-4154-afc1-f3bc5344258c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.132027] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Releasing lock "refresh_cache-05993c51-605c-4154-afc1-f3bc5344258c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.132027] env[69994]: DEBUG nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Instance network_info: |[{"id": "ae87b679-9aae-4b69-9339-d14c469c779b", "address": "fa:16:3e:ba:95:a9", "network": {"id": "6b3cbf7e-5b76-4899-a67c-5cccdeb5d269", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1170571372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25ae5db852d74e62aa2a72e59cfa3383", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae87b679-9a", "ovs_interfaceid": "ae87b679-9aae-4b69-9339-d14c469c779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1107.133032] env[69994]: DEBUG oslo_concurrency.lockutils 
[req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] Acquired lock "refresh_cache-05993c51-605c-4154-afc1-f3bc5344258c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.133191] env[69994]: DEBUG nova.network.neutron [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Refreshing network info cache for port ae87b679-9aae-4b69-9339-d14c469c779b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1107.134636] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:95:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c492f5cc-7ae0-4cab-823c-0d5dd8c60b26', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae87b679-9aae-4b69-9339-d14c469c779b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.143953] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Creating folder: Project (25ae5db852d74e62aa2a72e59cfa3383). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1107.146056] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c7bdb7e-0ed8-4b7f-9db9-4261e92ad2ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.157527] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Created folder: Project (25ae5db852d74e62aa2a72e59cfa3383) in parent group-v647729. [ 1107.157861] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Creating folder: Instances. Parent ref: group-v648011. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1107.158133] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6de0261d-38f1-4121-b116-d0a6eddb437e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.162976] env[69994]: INFO nova.compute.manager [-] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Took 1.43 seconds to deallocate network for instance. [ 1107.169965] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Created folder: Instances in parent group-v648011. [ 1107.170234] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
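Editorial aside: the network_info blob Nova caches for instance 05993c51-605c-4154-afc1-f3bc5344258c (quoted verbatim above) is just a list of VIF dicts. A small standalone sketch over an abridged copy of that blob shows how the fields the vmwareapi driver reports as "Instance VIF info" (MAC address, OVS interface id, fixed IP) fall out of it; this is illustration only, not Nova's network model code:

    # Standalone sketch (not Nova code): an abridged copy of the VIF entry
    # cached above, and the fields the driver extracts from it.
    vif = {
        "id": "ae87b679-9aae-4b69-9339-d14c469c779b",
        "address": "fa:16:3e:ba:95:a9",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.14", "type": "fixed"}],
            }],
        },
        "type": "ovs",
        "devname": "tapae87b679-9a",
        "ovs_interfaceid": "ae87b679-9aae-4b69-9339-d14c469c779b",
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["address"], vif["devname"], fixed_ips)
    # fa:16:3e:ba:95:a9 tapae87b679-9a ['192.168.128.14']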
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.170663] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1107.171015] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52908b77-5efe-43c5-bd83-c2cd371fb30b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.188321] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1107.188589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.821s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.188937] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.339s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.190635] env[69994]: INFO nova.compute.claims [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1107.195969] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.195969] env[69994]: value = "task-3242551" [ 1107.195969] env[69994]: _type = "Task" [ 1107.195969] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.205475] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242551, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.453511] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1107.453511] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52be5351-90ae-e0e1-d403-0ccb30dfdcad" [ 1107.453511] env[69994]: _type = "HttpNfcLease" [ 1107.453511] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1107.453897] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1107.453897] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52be5351-90ae-e0e1-d403-0ccb30dfdcad" [ 1107.453897] env[69994]: _type = "HttpNfcLease" [ 1107.453897] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1107.454737] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16f7d69-fce2-4815-bc11-ee5f77f7c40c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.462111] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520d8efb-f722-be08-62ad-a43cae28ea66/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1107.462384] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520d8efb-f722-be08-62ad-a43cae28ea66/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1107.527436] env[69994]: DEBUG oslo_vmware.api [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Task: {'id': task-3242548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288798} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.527989] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.528195] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1107.528375] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1107.528550] env[69994]: INFO nova.compute.manager [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1107.528787] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
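Editorial aside: the "Found VMDK URL ... Opening URL ... for reading" records above are the export half of the snapshot upload: once the HttpNfcLease is ready, the disk is streamed straight off the ESX host over HTTPS. A rough standalone sketch of that read side (plain urllib, not oslo.vmware's rw_handles; the chunk size and certificate handling are assumptions, not taken from the log):

    # Rough sketch, not oslo.vmware rw_handles: stream an exported VMDK over
    # HTTPS in chunks, e.g. to feed an image upload. verify=False mimics lab
    # setups with self-signed vCenter/ESX certificates (an assumption).
    import ssl
    import urllib.request

    def read_vmdk(url, chunk_size=64 * 1024, verify=True):
        ctx = ssl.create_default_context()
        if not verify:
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
        with urllib.request.urlopen(url, context=ctx) as resp:
            while True:
                chunk = resp.read(chunk_size)
                if not chunk:
                    break
                yield chunk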
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.528973] env[69994]: DEBUG nova.compute.manager [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1107.529079] env[69994]: DEBUG nova.network.neutron [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1107.549924] env[69994]: DEBUG nova.network.neutron [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1107.572660] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-962bee51-1b49-4ec1-b3c7-92c851adfa64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.672708] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.710809] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242551, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.897623] env[69994]: DEBUG nova.network.neutron [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Updated VIF entry in instance network info cache for port ae87b679-9aae-4b69-9339-d14c469c779b. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1107.899289] env[69994]: DEBUG nova.network.neutron [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Updating instance_info_cache with network_info: [{"id": "ae87b679-9aae-4b69-9339-d14c469c779b", "address": "fa:16:3e:ba:95:a9", "network": {"id": "6b3cbf7e-5b76-4899-a67c-5cccdeb5d269", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1170571372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25ae5db852d74e62aa2a72e59cfa3383", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c492f5cc-7ae0-4cab-823c-0d5dd8c60b26", "external-id": "nsx-vlan-transportzone-824", "segmentation_id": 824, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae87b679-9a", "ovs_interfaceid": "ae87b679-9aae-4b69-9339-d14c469c779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.052885] env[69994]: DEBUG nova.network.neutron [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.213952] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242551, 'name': CreateVM_Task, 'duration_secs': 0.572419} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.214342] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.215056] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.215279] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.215721] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1108.216053] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c17c2ff0-de77-4101-b87f-d432822e8cf4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.221136] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1108.221136] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52650bd3-af8f-2d68-44c6-67efcf0739b0" [ 1108.221136] env[69994]: _type = "Task" [ 1108.221136] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.230018] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52650bd3-af8f-2d68-44c6-67efcf0739b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.398366] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a589947-80e0-489e-9cde-43c5a611f9e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.401751] env[69994]: DEBUG oslo_concurrency.lockutils [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] Releasing lock "refresh_cache-05993c51-605c-4154-afc1-f3bc5344258c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.402082] env[69994]: DEBUG nova.compute.manager [req-7de97f79-b64c-4e57-a9e3-9b972c284ec0 req-c6ec6e8c-f4bf-47f4-9905-ac2f1efc061d service nova] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Received event network-vif-deleted-de3e77dc-7712-4e45-b1d3-fd50595cb0f2 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.407952] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f2051b-0aa9-4d3e-b248-bd120b99ba5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.442782] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4324e24c-b85f-47e9-9a6e-df07cb937390 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.450732] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd9e36b-3324-43dc-8cc6-da0d427145d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.465726] env[69994]: DEBUG nova.compute.provider_tree [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.555433] env[69994]: INFO nova.compute.manager [-] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Took 1.03 seconds to deallocate network for instance. [ 1108.732873] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52650bd3-af8f-2d68-44c6-67efcf0739b0, 'name': SearchDatastore_Task, 'duration_secs': 0.010117} completed successfully. 
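Editorial aside: the repeated "Task: {...} progress is N% ... completed successfully" records throughout this log come from a simple poll loop on the vCenter task object. A toy version of that pattern (illustration only, not oslo.vmware's wait_for_task; the poll callable and interval are assumptions):

    # Toy poll loop, illustration only (not oslo.vmware.api):
    # `poll` is any callable returning (state, progress) for a backend task.
    import time

    def wait_for_task(poll, interval=0.5):
        while True:
            state, progress = poll()
            print(f"progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    # Example with a fake task that finishes on the third poll:
    states = iter([("running", 0), ("running", 51), ("success", 100)])
    wait_for_task(lambda: next(states), interval=0)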
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.733324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.733572] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.733881] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.734085] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.734329] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.734644] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-097e589d-8de7-4566-a359-50990f7ea101 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.742884] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.743198] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1108.744199] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58f4fb4a-b93c-4732-acf3-b6c3c6906f22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.749783] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1108.749783] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529e1e15-bcf4-f07b-ddb7-d9a6e1f4b777" [ 1108.749783] env[69994]: _type = "Task" [ 1108.749783] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.759090] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529e1e15-bcf4-f07b-ddb7-d9a6e1f4b777, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.970039] env[69994]: DEBUG nova.scheduler.client.report [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1109.062166] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.260562] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529e1e15-bcf4-f07b-ddb7-d9a6e1f4b777, 'name': SearchDatastore_Task, 'duration_secs': 0.011923} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.261421] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a132da83-51c3-4607-96dc-77b752d3307c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.267161] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1109.267161] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5276c02f-2f0d-4b21-110c-73bce0f00bd7" [ 1109.267161] env[69994]: _type = "Task" [ 1109.267161] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.275064] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5276c02f-2f0d-4b21-110c-73bce0f00bd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.475509] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.286s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.476158] env[69994]: DEBUG nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1109.479138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 9.605s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.777711] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5276c02f-2f0d-4b21-110c-73bce0f00bd7, 'name': SearchDatastore_Task, 'duration_secs': 0.01653} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.777979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.778255] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 05993c51-605c-4154-afc1-f3bc5344258c/05993c51-605c-4154-afc1-f3bc5344258c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1109.778546] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53db4c10-93c3-4ce3-af99-5ce6ced14ca4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.785045] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1109.785045] env[69994]: value = "task-3242552" [ 1109.785045] env[69994]: _type = "Task" [ 1109.785045] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.792844] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242552, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.982701] env[69994]: DEBUG nova.compute.utils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1109.984510] env[69994]: DEBUG nova.objects.instance [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'migration_context' on Instance uuid e17fcc84-7c86-41b6-88ec-8a35619534b6 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.986164] env[69994]: DEBUG nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Allocating IP information in the background. 
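Editorial aside: the CopyVirtualDisk_Task record above shows the datastore-path convention used in this deployment: the cached image VMDK lives under the devstack-image-cache_base folder and is copied into a per-instance folder named after the instance UUID. Two hypothetical helpers (string formatting only, not Nova's ds_util; the cache folder name is specific to this deployment's configuration) reproduce the paths from the log:

    # Hypothetical helpers mirroring the datastore paths logged above.
    def cached_image_path(datastore, image_id):
        return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

    def instance_disk_path(datastore, instance_uuid):
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    src = cached_image_path("datastore1", "cc2e14cc-b12f-480a-a387-dd21e9efda8b")
    dst = instance_disk_path("datastore1", "05993c51-605c-4154-afc1-f3bc5344258c")
    print(f"{src} -> {dst}")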
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1109.986435] env[69994]: DEBUG nova.network.neutron [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1110.025755] env[69994]: DEBUG nova.policy [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b512f0a1ffba457b977e472009f59eed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '377f65074c2442588aee091b5165e1cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1110.298574] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242552, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.337670] env[69994]: DEBUG nova.network.neutron [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Successfully created port: 63b08705-8a3c-4011-9ebc-15f8463de275 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.487655] env[69994]: DEBUG nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1110.669572] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510e33c8-ae59-47e2-9b36-438076e0ab97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.677858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4482faa0-3775-4983-a4b1-a112b45aebda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.708467] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff72e14-7f29-47e1-8d72-a9cff7307966 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.716342] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786a9e57-85b1-4337-808c-43d155330ac2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.729696] env[69994]: DEBUG nova.compute.provider_tree [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.795964] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242552, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70576} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.796159] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 05993c51-605c-4154-afc1-f3bc5344258c/05993c51-605c-4154-afc1-f3bc5344258c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1110.796372] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1110.796648] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62c9593d-de14-4415-b602-3d0e38870e3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.803617] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1110.803617] env[69994]: value = "task-3242553" [ 1110.803617] env[69994]: _type = "Task" [ 1110.803617] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.813328] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242553, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.233574] env[69994]: DEBUG nova.scheduler.client.report [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1111.314377] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065369} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.314753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1111.315365] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808dac07-0b66-4d19-a7f7-a51440d8ed2f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.337386] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 05993c51-605c-4154-afc1-f3bc5344258c/05993c51-605c-4154-afc1-f3bc5344258c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1111.337715] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a33ba0c4-891a-4b82-9a8c-352afe1011aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.357396] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1111.357396] env[69994]: value = "task-3242554" [ 1111.357396] env[69994]: _type = "Task" [ 1111.357396] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.366038] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242554, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.500037] env[69994]: DEBUG nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1111.522179] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1111.522417] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1111.522586] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1111.522769] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1111.522916] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1111.523075] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1111.523945] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1111.523945] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1111.523945] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1111.523945] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1111.523945] env[69994]: DEBUG nova.virt.hardware [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1111.524816] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78132f05-a4cc-4a9a-9069-26b29f7d87c0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.533422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4ae0fa-e083-409c-8188-203601fa3bfa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.774053] env[69994]: DEBUG nova.compute.manager [req-4c2d8aa2-2784-4dad-a6cd-1af39fc82f2d req-640144b8-e210-40a5-8bf5-0ed911079a5e service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Received event network-vif-plugged-63b08705-8a3c-4011-9ebc-15f8463de275 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.774367] env[69994]: DEBUG oslo_concurrency.lockutils [req-4c2d8aa2-2784-4dad-a6cd-1af39fc82f2d req-640144b8-e210-40a5-8bf5-0ed911079a5e service nova] Acquiring lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.774522] env[69994]: DEBUG oslo_concurrency.lockutils [req-4c2d8aa2-2784-4dad-a6cd-1af39fc82f2d req-640144b8-e210-40a5-8bf5-0ed911079a5e service nova] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.774648] env[69994]: DEBUG 
oslo_concurrency.lockutils [req-4c2d8aa2-2784-4dad-a6cd-1af39fc82f2d req-640144b8-e210-40a5-8bf5-0ed911079a5e service nova] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.774886] env[69994]: DEBUG nova.compute.manager [req-4c2d8aa2-2784-4dad-a6cd-1af39fc82f2d req-640144b8-e210-40a5-8bf5-0ed911079a5e service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] No waiting events found dispatching network-vif-plugged-63b08705-8a3c-4011-9ebc-15f8463de275 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1111.775221] env[69994]: WARNING nova.compute.manager [req-4c2d8aa2-2784-4dad-a6cd-1af39fc82f2d req-640144b8-e210-40a5-8bf5-0ed911079a5e service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Received unexpected event network-vif-plugged-63b08705-8a3c-4011-9ebc-15f8463de275 for instance with vm_state building and task_state spawning. [ 1111.858588] env[69994]: DEBUG nova.network.neutron [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Successfully updated port: 63b08705-8a3c-4011-9ebc-15f8463de275 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.869311] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242554, 'name': ReconfigVM_Task, 'duration_secs': 0.28915} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.870092] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 05993c51-605c-4154-afc1-f3bc5344258c/05993c51-605c-4154-afc1-f3bc5344258c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1111.870713] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0793923f-1873-4370-87d6-640b1deef81c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.877706] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1111.877706] env[69994]: value = "task-3242555" [ 1111.877706] env[69994]: _type = "Task" [ 1111.877706] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.886237] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242555, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.245771] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.766s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.250922] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.348s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.253668] env[69994]: INFO nova.compute.claims [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.364602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.364820] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.365066] env[69994]: DEBUG nova.network.neutron [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.387395] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242555, 'name': Rename_Task, 'duration_secs': 0.142365} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.387708] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1112.387943] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1f3b816-e21c-434e-a120-8b8ac417c3dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.394688] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1112.394688] env[69994]: value = "task-3242556" [ 1112.394688] env[69994]: _type = "Task" [ 1112.394688] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.402392] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242556, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.900422] env[69994]: DEBUG nova.network.neutron [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.908540] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242556, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.056805] env[69994]: DEBUG nova.network.neutron [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance_info_cache with network_info: [{"id": "63b08705-8a3c-4011-9ebc-15f8463de275", "address": "fa:16:3e:da:d4:f3", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b08705-8a", "ovs_interfaceid": "63b08705-8a3c-4011-9ebc-15f8463de275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.405826] env[69994]: DEBUG oslo_vmware.api [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242556, 'name': PowerOnVM_Task, 'duration_secs': 0.748689} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.408593] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1113.408806] env[69994]: INFO nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 1113.409008] env[69994]: DEBUG nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.410070] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9a28bc-a239-449e-96d5-80dc289ddbbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.460540] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e892e9-c269-4501-bcc5-8270d6e1f5ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.469275] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f05758-fa05-4697-afa7-6e593c6725a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.502550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030758ff-18df-46c5-90fa-8fe5f354d768 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.510820] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c541d992-b19d-43f8-813a-90315e02d090 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.525986] env[69994]: DEBUG nova.compute.provider_tree [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.559249] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.559624] env[69994]: DEBUG nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Instance network_info: |[{"id": "63b08705-8a3c-4011-9ebc-15f8463de275", "address": "fa:16:3e:da:d4:f3", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b08705-8a", "ovs_interfaceid": "63b08705-8a3c-4011-9ebc-15f8463de275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1113.560066] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:d4:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63b08705-8a3c-4011-9ebc-15f8463de275', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1113.568097] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.568623] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1113.568860] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d06aab9-9790-478f-a8a8-17c2d383b679 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.589066] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.589066] env[69994]: value = "task-3242557" [ 1113.589066] env[69994]: _type = "Task" [ 1113.589066] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.597039] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242557, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.792928] env[69994]: INFO nova.compute.manager [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Swapping old allocation on dict_keys(['92ce3c95-4efe-4d04-802b-6b187afc5aa7']) held by migration 333534b2-bb77-4949-8bf8-98da2d12ec07 for instance [ 1113.804974] env[69994]: DEBUG nova.compute.manager [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Received event network-changed-63b08705-8a3c-4011-9ebc-15f8463de275 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.805128] env[69994]: DEBUG nova.compute.manager [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Refreshing instance network info cache due to event network-changed-63b08705-8a3c-4011-9ebc-15f8463de275. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1113.805407] env[69994]: DEBUG oslo_concurrency.lockutils [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] Acquiring lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.805583] env[69994]: DEBUG oslo_concurrency.lockutils [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] Acquired lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.805748] env[69994]: DEBUG nova.network.neutron [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Refreshing network info cache for port 63b08705-8a3c-4011-9ebc-15f8463de275 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.820692] env[69994]: DEBUG nova.scheduler.client.report [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Overwriting current allocation {'allocations': {'92ce3c95-4efe-4d04-802b-6b187afc5aa7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 138}}, 'project_id': '605d72502cc644bfa4d875bf348246de', 'user_id': '5f2b4659f30f4b9db4627d3d3abb6ba5', 'consumer_generation': 1} on consumer e17fcc84-7c86-41b6-88ec-8a35619534b6 {{(pid=69994) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1113.909594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.909782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock 
"refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.909958] env[69994]: DEBUG nova.network.neutron [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.929467] env[69994]: INFO nova.compute.manager [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Took 23.19 seconds to build instance. [ 1114.029489] env[69994]: DEBUG nova.scheduler.client.report [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.100925] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242557, 'name': CreateVM_Task, 'duration_secs': 0.378556} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.101141] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1114.101868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.102106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.102455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1114.102758] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88f1cdcd-cdac-483b-8e9c-33ae5161c66e {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.107722] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1114.107722] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fcef49-f3f0-bed1-4551-8a8acad940a2" [ 1114.107722] env[69994]: _type = "Task" [ 1114.107722] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.117181] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fcef49-f3f0-bed1-4551-8a8acad940a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.391495] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "05993c51-605c-4154-afc1-f3bc5344258c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.431853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6d3da681-4319-459d-b172-cccd2eac1dc1 tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "05993c51-605c-4154-afc1-f3bc5344258c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.700s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.431853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "05993c51-605c-4154-afc1-f3bc5344258c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.040s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.431853] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "05993c51-605c-4154-afc1-f3bc5344258c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.432427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "05993c51-605c-4154-afc1-f3bc5344258c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.432427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 
tempest-ServerGroupTestJSON-1347537780-project-member] Lock "05993c51-605c-4154-afc1-f3bc5344258c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.435109] env[69994]: INFO nova.compute.manager [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Terminating instance [ 1114.534386] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.534855] env[69994]: DEBUG nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1114.538500] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.355s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.539951] env[69994]: INFO nova.compute.claims [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1114.613103] env[69994]: DEBUG nova.network.neutron [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updated VIF entry in instance network info cache for port 63b08705-8a3c-4011-9ebc-15f8463de275. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1114.613448] env[69994]: DEBUG nova.network.neutron [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance_info_cache with network_info: [{"id": "63b08705-8a3c-4011-9ebc-15f8463de275", "address": "fa:16:3e:da:d4:f3", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b08705-8a", "ovs_interfaceid": "63b08705-8a3c-4011-9ebc-15f8463de275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.623052] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fcef49-f3f0-bed1-4551-8a8acad940a2, 'name': SearchDatastore_Task, 'duration_secs': 0.015378} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.623356] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.623585] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.623822] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.623967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.624510] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.625403] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfd5cbaf-1721-4e84-96a7-45dfb09a225d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.634286] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.634908] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.637008] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-428b9a9e-4c37-4c44-a879-4e6e005f13c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.643399] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1114.643399] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52017a55-9b9b-a5cf-967b-9f7034290846" [ 1114.643399] env[69994]: _type = "Task" [ 1114.643399] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.653602] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52017a55-9b9b-a5cf-967b-9f7034290846, 'name': SearchDatastore_Task} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.654458] env[69994]: DEBUG nova.network.neutron [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [{"id": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "address": "fa:16:3e:90:f2:06", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2521bc7-94", "ovs_interfaceid": "b2521bc7-942e-4d29-bc89-0fd13a02f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.658316] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5c5b2bd-9f12-4ebc-ac01-2ab654de8705 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.663456] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1114.663456] env[69994]: 
value = "session[520ca315-cd17-8670-37df-715bbcc23663]52521c5d-7088-c4c2-587f-4ff580d02955" [ 1114.663456] env[69994]: _type = "Task" [ 1114.663456] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.668971] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520d8efb-f722-be08-62ad-a43cae28ea66/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1114.670336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e639c010-d3d5-4566-b990-fff601b7f18a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.678298] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52521c5d-7088-c4c2-587f-4ff580d02955, 'name': SearchDatastore_Task, 'duration_secs': 0.00972} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.680032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.680120] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] d8d2958c-e44c-4796-becc-c572057f7ba5/d8d2958c-e44c-4796-becc-c572057f7ba5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1114.680353] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520d8efb-f722-be08-62ad-a43cae28ea66/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1114.680501] env[69994]: ERROR oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520d8efb-f722-be08-62ad-a43cae28ea66/disk-0.vmdk due to incomplete transfer. 
[ 1114.680706] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e04b524c-dd9a-40a8-884d-132473299e51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.682459] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aba48b10-9ced-4784-a65b-a1d7a71f78b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.689240] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1114.689240] env[69994]: value = "task-3242558" [ 1114.689240] env[69994]: _type = "Task" [ 1114.689240] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.690193] env[69994]: DEBUG oslo_vmware.rw_handles [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520d8efb-f722-be08-62ad-a43cae28ea66/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1114.690386] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Uploaded image 1ab8f161-e68e-437c-a5bf-624846f8bc40 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1114.692812] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1114.695904] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b9071509-4550-484d-89bb-0b4383f29241 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.701802] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242558, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.702993] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1114.702993] env[69994]: value = "task-3242559" [ 1114.702993] env[69994]: _type = "Task" [ 1114.702993] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.710017] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242559, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.941068] env[69994]: DEBUG nova.compute.manager [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1114.941286] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.942277] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30299ee-7035-47f0-bde7-940e25c6a849 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.952024] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.952488] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12a44e12-8896-4f7f-bef6-a98971a88093 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.960641] env[69994]: DEBUG oslo_vmware.api [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1114.960641] env[69994]: value = "task-3242560" [ 1114.960641] env[69994]: _type = "Task" [ 1114.960641] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.970983] env[69994]: DEBUG oslo_vmware.api [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242560, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.044811] env[69994]: DEBUG nova.compute.utils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1115.046491] env[69994]: DEBUG nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1115.047579] env[69994]: DEBUG nova.network.neutron [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1115.088798] env[69994]: DEBUG nova.policy [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64b979ffffc94e09bf911bdb89f4796a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccb64f97e46a4e499df974959db53dcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1115.118950] env[69994]: DEBUG oslo_concurrency.lockutils [req-dfd14cd9-501a-48cf-a7c4-e9b4027346d9 req-ecb358fc-696f-4788-bcab-fd4867f9a293 service nova] Releasing lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.161756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-e17fcc84-7c86-41b6-88ec-8a35619534b6" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.162839] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaba007-ceca-4a6b-a590-b765ce731741 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.170797] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7121858a-aeba-4b88-89fe-4bb81fe7de84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.200560] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496862} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.200990] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] d8d2958c-e44c-4796-becc-c572057f7ba5/d8d2958c-e44c-4796-becc-c572057f7ba5.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1115.201150] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1115.201449] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ead1fd6-9664-4527-8179-89a7b0ad1769 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.211579] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242559, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.213135] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1115.213135] env[69994]: value = "task-3242561" [ 1115.213135] env[69994]: _type = "Task" [ 1115.213135] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.225141] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242561, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.387090] env[69994]: DEBUG nova.network.neutron [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Successfully created port: ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1115.470724] env[69994]: DEBUG oslo_vmware.api [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242560, 'name': PowerOffVM_Task, 'duration_secs': 0.209871} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.471068] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.471151] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.471416] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54c7ec90-c5e3-4649-9050-ecebf78ec85a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.534279] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.534550] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.534769] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Deleting the datastore file [datastore1] 05993c51-605c-4154-afc1-f3bc5344258c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.535146] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae51487e-e09a-4474-890a-421fa0feb39c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.542086] env[69994]: DEBUG oslo_vmware.api [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for the task: (returnval){ [ 1115.542086] env[69994]: value = "task-3242563" [ 1115.542086] env[69994]: _type = "Task" [ 1115.542086] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.551012] env[69994]: DEBUG oslo_vmware.api [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242563, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.551582] env[69994]: DEBUG nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1115.716059] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242559, 'name': Destroy_Task, 'duration_secs': 0.550712} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.719375] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Destroyed the VM [ 1115.719636] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1115.722458] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-da16cc36-75e2-4c2c-8879-2c98a1ae287c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.729997] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061514} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.731361] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1115.731702] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1115.731702] env[69994]: value = "task-3242564" [ 1115.731702] env[69994]: _type = "Task" [ 1115.731702] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.732450] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2de19ac-411c-44b2-b80e-14df2879985a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.761492] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242564, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.771367] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] d8d2958c-e44c-4796-becc-c572057f7ba5/d8d2958c-e44c-4796-becc-c572057f7ba5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1115.774333] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00c2d7e6-635a-4790-88de-3824308dd197 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.791694] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4400efa-e74f-466f-bf6f-5fc843a1b473 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.803050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcc6148-4e05-4ce6-9bde-2568f25381c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.805944] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1115.805944] env[69994]: value = "task-3242565" [ 1115.805944] env[69994]: _type = "Task" [ 1115.805944] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.836634] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c68437a-b23b-41d8-bf0b-f90d82867f1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.842561] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242565, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.847680] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7406cc37-7633-4431-978a-16b9d1b8a7de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.861673] env[69994]: DEBUG nova.compute.provider_tree [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.052206] env[69994]: DEBUG oslo_vmware.api [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Task: {'id': task-3242563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143283} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.052462] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.052686] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.052924] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.053171] env[69994]: INFO nova.compute.manager [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1116.053496] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1116.053728] env[69994]: DEBUG nova.compute.manager [-] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1116.053848] env[69994]: DEBUG nova.network.neutron [-] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1116.247073] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242564, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.299208] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.299208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6df0d14-6bb7-4e02-a89d-7460feb1f213 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.307026] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1116.307026] env[69994]: value = "task-3242566" [ 1116.307026] env[69994]: _type = "Task" [ 1116.307026] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.320490] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242566, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.326195] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242565, 'name': ReconfigVM_Task, 'duration_secs': 0.329968} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.326195] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Reconfigured VM instance instance-00000063 to attach disk [datastore2] d8d2958c-e44c-4796-becc-c572057f7ba5/d8d2958c-e44c-4796-becc-c572057f7ba5.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1116.326195] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28de384c-3c62-4d7c-ad91-1e1e6934d13d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.332188] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1116.332188] env[69994]: value = "task-3242567" [ 1116.332188] env[69994]: _type = "Task" [ 1116.332188] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.344766] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242567, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.364608] env[69994]: DEBUG nova.scheduler.client.report [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.371365] env[69994]: DEBUG nova.compute.manager [req-f1ff530a-fa72-4805-be90-d2248c0f4bed req-0c5f69d6-67d2-45f3-9a52-d17b4f68f371 service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Received event network-vif-deleted-ae87b679-9aae-4b69-9339-d14c469c779b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1116.371620] env[69994]: INFO nova.compute.manager [req-f1ff530a-fa72-4805-be90-d2248c0f4bed req-0c5f69d6-67d2-45f3-9a52-d17b4f68f371 service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Neutron deleted interface ae87b679-9aae-4b69-9339-d14c469c779b; detaching it from the instance and deleting it from the info cache [ 1116.371811] env[69994]: DEBUG nova.network.neutron [req-f1ff530a-fa72-4805-be90-d2248c0f4bed req-0c5f69d6-67d2-45f3-9a52-d17b4f68f371 service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.563841] env[69994]: DEBUG nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1116.589572] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1116.589811] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.589969] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1116.590162] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.590309] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1116.590455] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1116.590662] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1116.590817] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1116.590978] env[69994]: DEBUG nova.virt.hardware [None 
req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1116.591152] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1116.591321] env[69994]: DEBUG nova.virt.hardware [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1116.592221] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748f5b44-1b39-4892-ab29-9025e9ac371c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.600100] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119349ab-ef3a-4223-8111-5747a6a8b448 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.615241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "65facb63-1323-4905-b107-a5c5782d4a4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.615463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "65facb63-1323-4905-b107-a5c5782d4a4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.745618] env[69994]: DEBUG oslo_vmware.api [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242564, 'name': RemoveSnapshot_Task, 'duration_secs': 0.652657} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.746270] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1116.746510] env[69994]: INFO nova.compute.manager [None req-b3cdafab-13ab-4af8-a0cd-e6633e66a89b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Took 13.44 seconds to snapshot the instance on the hypervisor. 
[ 1116.781254] env[69994]: DEBUG nova.compute.manager [req-6d62771d-2505-469d-89e5-0f555fc3d011 req-7a5506c6-ccda-4e3e-bcf5-bed785b6e128 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Received event network-vif-plugged-ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1116.782939] env[69994]: DEBUG oslo_concurrency.lockutils [req-6d62771d-2505-469d-89e5-0f555fc3d011 req-7a5506c6-ccda-4e3e-bcf5-bed785b6e128 service nova] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.782939] env[69994]: DEBUG oslo_concurrency.lockutils [req-6d62771d-2505-469d-89e5-0f555fc3d011 req-7a5506c6-ccda-4e3e-bcf5-bed785b6e128 service nova] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.782939] env[69994]: DEBUG oslo_concurrency.lockutils [req-6d62771d-2505-469d-89e5-0f555fc3d011 req-7a5506c6-ccda-4e3e-bcf5-bed785b6e128 service nova] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.782939] env[69994]: DEBUG nova.compute.manager [req-6d62771d-2505-469d-89e5-0f555fc3d011 req-7a5506c6-ccda-4e3e-bcf5-bed785b6e128 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] No waiting events found dispatching network-vif-plugged-ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1116.782939] env[69994]: WARNING nova.compute.manager [req-6d62771d-2505-469d-89e5-0f555fc3d011 req-7a5506c6-ccda-4e3e-bcf5-bed785b6e128 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Received unexpected event network-vif-plugged-ba2c9555-1cfb-479b-b793-f20615723d77 for instance with vm_state building and task_state spawning. [ 1116.815146] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242566, 'name': PowerOffVM_Task, 'duration_secs': 0.230529} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.815426] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.816112] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1116.816326] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.816484] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1116.816666] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.816812] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1116.816956] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1116.817171] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1116.817329] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1116.817495] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1116.817659] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1116.817830] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1116.823180] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae3f75bf-ec69-4529-a272-d7c3d0205c6b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.841464] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242567, 'name': Rename_Task, 'duration_secs': 0.187} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.842669] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1116.842971] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1116.842971] env[69994]: value = "task-3242568" [ 1116.842971] env[69994]: _type = "Task" [ 1116.842971] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.843159] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a145013-7107-4dcf-9f89-4f3595c3488d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.852757] env[69994]: DEBUG nova.network.neutron [-] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.853850] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242568, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.855035] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1116.855035] env[69994]: value = "task-3242569" [ 1116.855035] env[69994]: _type = "Task" [ 1116.855035] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.863105] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.867488] env[69994]: DEBUG nova.network.neutron [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Successfully updated port: ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1116.874105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.874856] env[69994]: DEBUG nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1116.879164] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.192s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.879348] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.885345] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.308s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.885588] env[69994]: DEBUG nova.objects.instance [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1116.890299] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f51c81f0-7fc1-4779-a61c-72efd618131d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.904370] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f96a5cd-1702-48dd-8f5f-f15f28cf82ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.925301] env[69994]: INFO nova.scheduler.client.report [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted allocations for instance ab99499b-21a2-465b-9975-4e0adb18df94 [ 1116.949667] env[69994]: DEBUG nova.compute.manager [req-f1ff530a-fa72-4805-be90-d2248c0f4bed req-0c5f69d6-67d2-45f3-9a52-d17b4f68f371 service nova] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Detach interface failed, port_id=ae87b679-9aae-4b69-9339-d14c469c779b, reason: Instance 05993c51-605c-4154-afc1-f3bc5344258c could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1117.117387] env[69994]: DEBUG nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1117.356697] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242568, 'name': ReconfigVM_Task, 'duration_secs': 0.205204} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.361761] env[69994]: INFO nova.compute.manager [-] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Took 1.31 seconds to deallocate network for instance. [ 1117.362909] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c52cba-6c88-465d-95c1-a6d11f51e531 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.373596] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.373777] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.373937] env[69994]: DEBUG nova.network.neutron [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1117.404895] env[69994]: DEBUG oslo_vmware.api [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242569, 'name': PowerOnVM_Task, 'duration_secs': 0.482357} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.406158] env[69994]: DEBUG nova.compute.utils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1117.412133] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1117.413186] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1117.413186] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.413186] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1117.413283] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.413866] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1117.413866] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1117.414289] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1117.414502] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1117.414793] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1117.415115] env[69994]: DEBUG nova.virt.hardware [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1117.419422] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1117.419634] env[69994]: INFO nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Took 5.92 seconds to spawn the instance on the hypervisor. [ 1117.419838] env[69994]: DEBUG nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1117.420203] env[69994]: DEBUG nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1117.420447] env[69994]: DEBUG nova.network.neutron [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1117.423225] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b47dba4d-fa84-48cb-8987-6ec01a81ddbd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.426735] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479951d8-921f-48b0-a6dc-88a90098efee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.435545] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1117.435545] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5273355c-809c-91f8-c278-a8928d44cc92" [ 1117.435545] env[69994]: _type = "Task" [ 1117.435545] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.437401] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2d0bb069-7275-46e4-8b22-1fb62aa6dfed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "ab99499b-21a2-465b-9975-4e0adb18df94" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 20.100s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.451210] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5273355c-809c-91f8-c278-a8928d44cc92, 'name': SearchDatastore_Task, 'duration_secs': 0.009711} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.456922] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfiguring VM instance instance-0000004f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1117.457796] env[69994]: DEBUG nova.network.neutron [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1117.459842] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99fc8031-fe96-4c77-817a-829cf1688cfc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.479276] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1117.479276] env[69994]: value = "task-3242570" [ 1117.479276] env[69994]: _type = "Task" [ 1117.479276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.488124] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242570, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.491486] env[69994]: DEBUG nova.policy [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb1874902bc24959b717674a99e530a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee188ea80c9847188df8b8482b7c6ec7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1117.622640] env[69994]: DEBUG nova.network.neutron [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updating instance_info_cache with network_info: [{"id": "ba2c9555-1cfb-479b-b793-f20615723d77", "address": "fa:16:3e:3d:fe:51", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba2c9555-1c", "ovs_interfaceid": "ba2c9555-1cfb-479b-b793-f20615723d77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.645637] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.799723] env[69994]: DEBUG nova.network.neutron [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Successfully created port: c53c19bc-a6d9-4b00-907a-97b4755bb119 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1117.877023] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.921094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-428ff5c5-6bba-41c6-9c80-2ee8e977ad42 tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.033s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.921094] env[69994]: DEBUG nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1117.922892] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.250s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.923348] env[69994]: DEBUG nova.objects.instance [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lazy-loading 'resources' on Instance uuid ed662f67-be0e-4f19-bb8a-6af39b4d348c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.956137] env[69994]: INFO nova.compute.manager [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Took 19.13 seconds to build instance. [ 1117.989609] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242570, 'name': ReconfigVM_Task, 'duration_secs': 0.431706} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.989973] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfigured VM instance instance-0000004f to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1117.990846] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9675d576-91cd-4f79-8284-2234326cc9b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.020923] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.021222] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6baa882-bb13-4457-b236-3e15be0a4d9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.039173] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1118.039173] env[69994]: value = "task-3242571" [ 1118.039173] env[69994]: _type = "Task" [ 1118.039173] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.047295] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242571, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.128181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.128181] env[69994]: DEBUG nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Instance network_info: |[{"id": "ba2c9555-1cfb-479b-b793-f20615723d77", "address": "fa:16:3e:3d:fe:51", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba2c9555-1c", "ovs_interfaceid": "ba2c9555-1cfb-479b-b793-f20615723d77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1118.128181] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:fe:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba2c9555-1cfb-479b-b793-f20615723d77', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1118.135856] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1118.136114] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1118.136351] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ea73464-1658-43bc-acc7-a9f663d4c183 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.156971] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1118.156971] env[69994]: value = "task-3242572" [ 1118.156971] env[69994]: _type = "Task" [ 1118.156971] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.164502] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242572, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.460623] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cd2ff8b2-5057-4578-aa48-478f696ba159 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.638s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.550365] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242571, 'name': ReconfigVM_Task, 'duration_secs': 0.482321} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.550650] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfigured VM instance instance-0000004f to attach disk [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6/e17fcc84-7c86-41b6-88ec-8a35619534b6.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.551547] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bd6b53-ed63-4538-ae5b-e906c65441fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.577693] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4fa592-0bbc-4360-80e1-c9537b4f0e41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.601456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e723817a-f6c9-4980-86ca-2b0c5924c044 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.625390] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e6d5f7-46c1-48cd-a6f6-cd9d786d54fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.633803] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.633803] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18d6b8bf-5ad8-4491-99e8-ff0b21a9486a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.639057] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267ccb1d-f9e1-472b-a14a-f2d4642c78d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.642613] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1118.642613] env[69994]: value = "task-3242573" [ 1118.642613] env[69994]: _type = "Task" [ 1118.642613] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.648270] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9536cb84-4bd2-45dd-9cfa-4baa41b86906 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.654438] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242573, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.686681] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6160438e-3e1f-4c7d-80c0-d87ac3b3c8f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.695874] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242572, 'name': CreateVM_Task, 'duration_secs': 0.354437} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.697044] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e63fa0f-0e9e-4cc5-95a3-924ff8003eaf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.700567] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.701254] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.701447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.701772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.702359] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-839629b7-72fa-42e5-b647-e8f420da813c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.712201] env[69994]: DEBUG nova.compute.provider_tree [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed in ProviderTree for provider: 
92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.714684] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1118.714684] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52089de3-ce4e-b90d-62a9-b533ef498155" [ 1118.714684] env[69994]: _type = "Task" [ 1118.714684] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.724871] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52089de3-ce4e-b90d-62a9-b533ef498155, 'name': SearchDatastore_Task, 'duration_secs': 0.010416} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.725161] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.725392] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.725626] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.725793] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.725975] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.726231] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36fdd603-e9e9-4621-9158-d1ebeba57e4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.739725] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba 
tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.739924] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1118.740626] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c63c7e0b-1c1d-4c72-8a5a-1aa5e2a25277 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.746101] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1118.746101] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526c7943-35f6-9917-3481-bc86c7b437b4" [ 1118.746101] env[69994]: _type = "Task" [ 1118.746101] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.753373] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c7943-35f6-9917-3481-bc86c7b437b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.809420] env[69994]: DEBUG nova.compute.manager [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Received event network-changed-ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1118.809594] env[69994]: DEBUG nova.compute.manager [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Refreshing instance network info cache due to event network-changed-ba2c9555-1cfb-479b-b793-f20615723d77. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1118.809811] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] Acquiring lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.809953] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] Acquired lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.810150] env[69994]: DEBUG nova.network.neutron [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Refreshing network info cache for port ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.933604] env[69994]: DEBUG nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1118.958749] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1118.959007] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1118.959172] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1118.959395] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1118.959579] env[69994]: DEBUG nova.virt.hardware [None 
req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1118.959738] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1118.959951] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1118.960132] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1118.960306] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1118.960469] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1118.960641] env[69994]: DEBUG nova.virt.hardware [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1118.961558] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b95a1aa-df6d-4918-bb09-5117e7265ed3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.970226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb44fb9-b1c2-4625-9076-1372d5cf8271 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.154837] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242573, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.217256] env[69994]: DEBUG nova.scheduler.client.report [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.256850] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526c7943-35f6-9917-3481-bc86c7b437b4, 'name': SearchDatastore_Task, 'duration_secs': 0.013989} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.257637] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-501881bc-9874-46ca-bb09-63a80792b69f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.263100] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1119.263100] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52be76e8-738a-4cdf-335f-b192efc84cc9" [ 1119.263100] env[69994]: _type = "Task" [ 1119.263100] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.270612] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52be76e8-738a-4cdf-335f-b192efc84cc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.369807] env[69994]: DEBUG nova.compute.manager [req-b069d6cf-7b42-4a77-8ee4-884b87c8e09b req-6a42adfb-f042-4fa3-aa0e-7f830308f407 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Received event network-vif-plugged-c53c19bc-a6d9-4b00-907a-97b4755bb119 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1119.369807] env[69994]: DEBUG oslo_concurrency.lockutils [req-b069d6cf-7b42-4a77-8ee4-884b87c8e09b req-6a42adfb-f042-4fa3-aa0e-7f830308f407 service nova] Acquiring lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.370103] env[69994]: DEBUG oslo_concurrency.lockutils [req-b069d6cf-7b42-4a77-8ee4-884b87c8e09b req-6a42adfb-f042-4fa3-aa0e-7f830308f407 service nova] Lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.370256] env[69994]: DEBUG oslo_concurrency.lockutils [req-b069d6cf-7b42-4a77-8ee4-884b87c8e09b req-6a42adfb-f042-4fa3-aa0e-7f830308f407 service nova] Lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.370400] env[69994]: DEBUG nova.compute.manager [req-b069d6cf-7b42-4a77-8ee4-884b87c8e09b req-6a42adfb-f042-4fa3-aa0e-7f830308f407 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] No waiting events found dispatching network-vif-plugged-c53c19bc-a6d9-4b00-907a-97b4755bb119 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1119.370604] env[69994]: WARNING nova.compute.manager [req-b069d6cf-7b42-4a77-8ee4-884b87c8e09b req-6a42adfb-f042-4fa3-aa0e-7f830308f407 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Received unexpected event network-vif-plugged-c53c19bc-a6d9-4b00-907a-97b4755bb119 for instance with vm_state building and task_state spawning. [ 1119.482622] env[69994]: DEBUG nova.network.neutron [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Successfully updated port: c53c19bc-a6d9-4b00-907a-97b4755bb119 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1119.506791] env[69994]: DEBUG nova.compute.manager [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1119.527443] env[69994]: DEBUG nova.network.neutron [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updated VIF entry in instance network info cache for port ba2c9555-1cfb-479b-b793-f20615723d77. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1119.527617] env[69994]: DEBUG nova.network.neutron [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updating instance_info_cache with network_info: [{"id": "ba2c9555-1cfb-479b-b793-f20615723d77", "address": "fa:16:3e:3d:fe:51", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba2c9555-1c", "ovs_interfaceid": "ba2c9555-1cfb-479b-b793-f20615723d77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.653565] env[69994]: DEBUG oslo_vmware.api [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242573, 'name': PowerOnVM_Task, 'duration_secs': 0.84737} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.653944] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1119.725950] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.803s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.729060] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.666s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.729060] env[69994]: DEBUG nova.objects.instance [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lazy-loading 'resources' on Instance uuid dd5ebc73-5866-4a5b-9d4f-aac721b0da8d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.747426] env[69994]: INFO nova.scheduler.client.report [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Deleted allocations for instance ed662f67-be0e-4f19-bb8a-6af39b4d348c [ 1119.774763] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52be76e8-738a-4cdf-335f-b192efc84cc9, 'name': SearchDatastore_Task, 'duration_secs': 0.008814} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.775034] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.775306] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1119.775565] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96c0bf1e-fa2c-476f-bc69-3f18b1adb3b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.782243] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1119.782243] env[69994]: value = "task-3242574" [ 1119.782243] env[69994]: _type = "Task" [ 1119.782243] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.789891] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242574, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.986339] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.986631] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.986955] env[69994]: DEBUG nova.network.neutron [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1120.029997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.031859] env[69994]: DEBUG oslo_concurrency.lockutils [req-b1ade97a-e167-409f-8bab-720f8e031114 req-231b3989-ad15-4dd3-9cca-f71296da4796 service nova] Releasing lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.254480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4dca99e8-faa9-4dd9-99d5-277ee8535de8 tempest-ServerRescueNegativeTestJSON-1382256594 tempest-ServerRescueNegativeTestJSON-1382256594-project-member] Lock "ed662f67-be0e-4f19-bb8a-6af39b4d348c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.138s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.293079] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.436186} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.295493] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.295745] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.296191] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e2e9bf9-f8a7-4136-b36b-03eca8708047 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.302793] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1120.302793] env[69994]: value = "task-3242575" [ 1120.302793] env[69994]: _type = "Task" [ 1120.302793] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.315257] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242575, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.389727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c967389-e137-49db-940e-eb36ce7996c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.396934] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be1d701-61bb-4f0c-8c1f-0dd878afa95c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.426659] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1604f8-e5b8-4a8c-a226-031d45b61874 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.434555] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405f8f44-2876-477b-ba12-165c996c2354 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.448336] env[69994]: DEBUG nova.compute.provider_tree [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.529941] env[69994]: DEBUG nova.network.neutron [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1120.589317] env[69994]: INFO nova.compute.manager [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Rebuilding instance [ 1120.643992] env[69994]: DEBUG nova.compute.manager [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1120.644849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a43d3b-cdbb-4702-a6ad-b30a5879e5c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.703719] env[69994]: INFO nova.compute.manager [None req-04cdc896-1661-43cb-947e-dbe19aae0098 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance to original state: 'active' [ 1120.763377] env[69994]: DEBUG nova.network.neutron [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance_info_cache with network_info: [{"id": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "address": "fa:16:3e:81:39:4b", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53c19bc-a6", "ovs_interfaceid": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.812390] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242575, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06138} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.812971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1120.813837] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53da2e21-8cd3-496f-aab7-11663cf25b15 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.839346] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.839859] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31fb2c7b-469b-41dc-959b-b2a901660545 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.861889] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1120.861889] env[69994]: value = "task-3242576" [ 1120.861889] env[69994]: _type = "Task" [ 1120.861889] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.869598] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242576, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.951309] env[69994]: DEBUG nova.scheduler.client.report [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1121.265961] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.269186] env[69994]: DEBUG nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Instance network_info: |[{"id": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "address": "fa:16:3e:81:39:4b", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53c19bc-a6", "ovs_interfaceid": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1121.269186] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:39:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31264e2-3e0a-4dfb-ba1f-6389d7d47548', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c53c19bc-a6d9-4b00-907a-97b4755bb119', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1121.275221] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca 
tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1121.275588] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1121.276227] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac607a8c-4612-401c-8dda-d7a487072500 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.298175] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1121.298175] env[69994]: value = "task-3242577" [ 1121.298175] env[69994]: _type = "Task" [ 1121.298175] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.306577] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242577, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.371528] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242576, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.456120] env[69994]: DEBUG nova.compute.manager [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Received event network-changed-c53c19bc-a6d9-4b00-907a-97b4755bb119 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1121.456120] env[69994]: DEBUG nova.compute.manager [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Refreshing instance network info cache due to event network-changed-c53c19bc-a6d9-4b00-907a-97b4755bb119. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1121.456120] env[69994]: DEBUG oslo_concurrency.lockutils [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] Acquiring lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.456120] env[69994]: DEBUG oslo_concurrency.lockutils [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] Acquired lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.456120] env[69994]: DEBUG nova.network.neutron [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Refreshing network info cache for port c53c19bc-a6d9-4b00-907a-97b4755bb119 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1121.458395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.460959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.815s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.463348] env[69994]: INFO nova.compute.claims [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1121.488255] env[69994]: INFO nova.scheduler.client.report [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Deleted allocations for instance dd5ebc73-5866-4a5b-9d4f-aac721b0da8d [ 1121.659347] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.660587] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e19efcea-f9db-4de2-a916-86facce41641 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.667483] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1121.667483] env[69994]: value = "task-3242578" [ 1121.667483] env[69994]: _type = "Task" [ 1121.667483] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.677113] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.809562] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242577, 'name': CreateVM_Task, 'duration_secs': 0.447724} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.809562] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1121.809562] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.810065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.810065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1121.810290] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-465536b2-ca75-4ff4-9b34-03f0adc3e5f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.815344] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1121.815344] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52295be7-47da-4121-24e3-73cbd25ef567" [ 1121.815344] env[69994]: _type = "Task" [ 1121.815344] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.824938] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52295be7-47da-4121-24e3-73cbd25ef567, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.874349] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242576, 'name': ReconfigVM_Task, 'duration_secs': 0.845525} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.874349] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfigured VM instance instance-00000064 to attach disk [datastore1] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.874349] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5d84c06-f8ab-410f-9b30-252fe8d8796d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.880790] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1121.880790] env[69994]: value = "task-3242579" [ 1121.880790] env[69994]: _type = "Task" [ 1121.880790] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.890286] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242579, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.001794] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6833bb18-be3a-4073-85c5-aef6314cde3a tempest-ServersListShow296Test-1370496072 tempest-ServersListShow296Test-1370496072-project-member] Lock "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.178s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.161730] env[69994]: DEBUG nova.network.neutron [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updated VIF entry in instance network info cache for port c53c19bc-a6d9-4b00-907a-97b4755bb119. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1122.162238] env[69994]: DEBUG nova.network.neutron [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance_info_cache with network_info: [{"id": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "address": "fa:16:3e:81:39:4b", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53c19bc-a6", "ovs_interfaceid": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.177773] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242578, 'name': PowerOffVM_Task, 'duration_secs': 0.180796} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.178040] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.178351] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.179049] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d069e08-1be6-4928-912a-65fda6163b9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.186307] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.186546] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fcbf349-390a-4f0a-96d6-7beac2f3042b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.234344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.234344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.234344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.234344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1122.234344] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.236712] env[69994]: INFO nova.compute.manager [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Terminating instance [ 1122.247800] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.248013] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.248013] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleting the datastore file [datastore1] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.248925] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3e03fd6-6e58-42d3-b5e5-eb87a42b85f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.256021] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1122.256021] env[69994]: value = "task-3242581" [ 1122.256021] env[69994]: _type = "Task" [ 1122.256021] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.264293] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.329745] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52295be7-47da-4121-24e3-73cbd25ef567, 'name': SearchDatastore_Task, 'duration_secs': 0.011934} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.330227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.330563] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1122.330830] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.331041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.331279] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.331711] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0812837e-14ce-45b2-b51c-8fc852d6ac24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.349737] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.349979] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1122.350895] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3145ad8a-a1c3-4b32-a2d2-04fe116f95a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.356387] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1122.356387] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]522bff31-a2a7-ab1d-79c1-bca6222279c7" [ 1122.356387] env[69994]: _type = "Task" [ 1122.356387] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.365205] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522bff31-a2a7-ab1d-79c1-bca6222279c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.390301] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242579, 'name': Rename_Task, 'duration_secs': 0.158091} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.390591] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.390850] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50d50f39-02ac-4830-9e8b-7fde44117339 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.397619] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1122.397619] env[69994]: value = "task-3242582" [ 1122.397619] env[69994]: _type = "Task" [ 1122.397619] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.405676] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242582, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.649456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9920ae-9c28-46e5-af0c-41382eb28e8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.658313] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e41d0a-ea9a-487d-a913-3c3fcabfe462 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.664959] env[69994]: DEBUG oslo_concurrency.lockutils [req-d84832de-e930-4246-9637-56358bcc635f req-2499a1b9-9438-4670-b9f5-ed04bdfc7e38 service nova] Releasing lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.692620] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6237e842-d03f-4604-b660-b48baf3649f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.700892] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700acb92-96aa-4733-aba1-aefdc10e768d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.714404] env[69994]: DEBUG nova.compute.provider_tree [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.741377] env[69994]: DEBUG nova.compute.manager [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1122.741802] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.742106] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dde880d9-2716-42a1-9c7c-d6758e33334f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.748787] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1122.748787] env[69994]: value = "task-3242583" [ 1122.748787] env[69994]: _type = "Task" [ 1122.748787] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.757104] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.766205] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154599} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.766451] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.766633] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.766805] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.866326] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]522bff31-a2a7-ab1d-79c1-bca6222279c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008409} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.867123] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1cf62aa-04ef-4ba3-85b7-21a9dd61bef4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.872814] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1122.872814] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a421cf-aafc-de24-99b1-18f10733c6ab" [ 1122.872814] env[69994]: _type = "Task" [ 1122.872814] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.880410] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a421cf-aafc-de24-99b1-18f10733c6ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.906533] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242582, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.217226] env[69994]: DEBUG nova.scheduler.client.report [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1123.258927] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242583, 'name': PowerOffVM_Task, 'duration_secs': 0.302132} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.259142] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1123.259348] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1123.259532] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647994', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'name': 'volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'e17fcc84-7c86-41b6-88ec-8a35619534b6', 'attached_at': '2025-04-03T08:47:33.000000', 'detached_at': '', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'serial': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1123.260319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d9257e-9383-4e5c-a3ec-5557d36ca0ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.284736] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501e67f1-7397-4d58-9d3b-97d532d13e12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.292316] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1e3e7a-846b-4a8c-a23f-3041c53e32f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.315639] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9082aef3-3dc4-4551-8fc6-2634a833dd6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.329168] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] The volume has not been displaced from its original location: [datastore2] volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc/volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1123.334523] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfiguring VM instance instance-0000004f to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1123.335059] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16b147ac-3520-4252-9d20-9459640301d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.355260] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1123.355260] env[69994]: value = "task-3242584" [ 1123.355260] env[69994]: _type = "Task" [ 1123.355260] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.363189] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242584, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.382385] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a421cf-aafc-de24-99b1-18f10733c6ab, 'name': SearchDatastore_Task, 'duration_secs': 0.009341} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.382668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.382941] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ca237467-eafc-4c18-a56e-98b94d111c92/ca237467-eafc-4c18-a56e-98b94d111c92.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1123.383220] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c51f5ebb-c2e2-4414-986b-4cec936a0010 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.388919] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1123.388919] env[69994]: value = "task-3242585" [ 1123.388919] env[69994]: _type = "Task" [ 1123.388919] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.396657] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242585, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.406855] env[69994]: DEBUG oslo_vmware.api [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242582, 'name': PowerOnVM_Task, 'duration_secs': 0.766072} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.407147] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1123.407381] env[69994]: INFO nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Took 6.84 seconds to spawn the instance on the hypervisor. 
[ 1123.407590] env[69994]: DEBUG nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1123.408624] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5080a8-e29f-414d-b0a2-890a7af3f1d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.725024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.725024] env[69994]: DEBUG nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1123.727964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.851s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.728588] env[69994]: DEBUG nova.objects.instance [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lazy-loading 'resources' on Instance uuid 05993c51-605c-4154-afc1-f3bc5344258c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.818979] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1123.818979] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1123.819512] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1123.819512] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1123.819512] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1123.819620] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1123.819770] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1123.819914] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1123.820115] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1123.820287] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1123.820461] env[69994]: DEBUG nova.virt.hardware [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1123.821376] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9e39b5-1688-4583-9937-24a129886586 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.830814] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75549798-5063-4b1c-9eda-b4e3825a7966 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.845111] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:82:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a71caa6-eaba-4605-b4de-9df7bfa68007', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1123.852700] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1123.853343] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1123.853512] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b70843b7-7d04-473c-af11-30071c408982 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.878032] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242584, 'name': ReconfigVM_Task, 'duration_secs': 0.265503} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.879341] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Reconfigured VM instance instance-0000004f to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1123.884100] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1123.884100] env[69994]: value = "task-3242586" [ 1123.884100] env[69994]: _type = "Task" [ 1123.884100] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.884100] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0524b3ae-e83f-44f2-9447-00307f329494 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.907738] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457963} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.916893] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] ca237467-eafc-4c18-a56e-98b94d111c92/ca237467-eafc-4c18-a56e-98b94d111c92.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1123.916893] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1123.916893] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242586, 'name': CreateVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.916893] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1123.916893] env[69994]: value = "task-3242587" [ 1123.916893] env[69994]: _type = "Task" [ 1123.916893] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.916893] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-441555ec-838b-40d3-bebe-020827162445 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.929969] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1123.929969] env[69994]: value = "task-3242588" [ 1123.929969] env[69994]: _type = "Task" [ 1123.929969] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.930456] env[69994]: INFO nova.compute.manager [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Took 23.04 seconds to build instance. [ 1123.934427] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242587, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.943729] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242588, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.234097] env[69994]: DEBUG nova.compute.utils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1124.239740] env[69994]: DEBUG nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1124.239740] env[69994]: DEBUG nova.network.neutron [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1124.303094] env[69994]: DEBUG nova.policy [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '761ebe718b0f48939612e82c6b1e6766', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4c158f7555d4606b641be4264d95eaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1124.405424] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242586, 'name': CreateVM_Task, 'duration_secs': 0.357173} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.407896] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1124.409210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.409210] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.412029] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1124.412029] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52b47949-0058-4858-ba97-959d446e6d27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.414575] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1124.414575] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c178b5-dab1-2484-e5ce-62020555c18b" [ 1124.414575] env[69994]: _type = "Task" [ 1124.414575] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.422993] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18aebb33-3dc0-4316-927a-7998151a550f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.429740] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c178b5-dab1-2484-e5ce-62020555c18b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.434381] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242587, 'name': ReconfigVM_Task, 'duration_secs': 0.150666} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.436360] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-647994', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'name': 'volume-15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'e17fcc84-7c86-41b6-88ec-8a35619534b6', 'attached_at': '2025-04-03T08:47:33.000000', 'detached_at': '', 'volume_id': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc', 'serial': '15a9e42e-3dfd-43a3-9ef9-92e636edfdbc'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1124.436649] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1124.439962] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d1a78ae9-ea1f-4935-9443-19aa753349ba tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.559s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.440717] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed8e328-3180-439a-9a74-834184895c48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.443872] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242d17f3-cfb7-48e1-9d08-782ea2745d17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.451567] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242588, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083111} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.478685] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1124.479179] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1124.479946] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1597e4-3e6a-4005-80de-671378fb8d49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.483048] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b92bb2c-1f5e-468f-a498-5f8a25fca6e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.485537] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-965eeeff-7327-4ca1-bb99-a986659dcf5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.501359] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ff15bf-b50b-490a-892e-dd27550e5ab0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.516261] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] ca237467-eafc-4c18-a56e-98b94d111c92/ca237467-eafc-4c18-a56e-98b94d111c92.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1124.517118] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b1b0c44-a672-4d3c-a02a-92551ccffe9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.542506] env[69994]: DEBUG nova.compute.provider_tree [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.544337] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1124.544337] env[69994]: value = "task-3242590" [ 1124.544337] env[69994]: _type = "Task" [ 1124.544337] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.552456] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242590, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.740368] env[69994]: DEBUG nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1124.781392] env[69994]: DEBUG nova.network.neutron [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Successfully created port: 940c4e47-4675-43d9-a93a-1e57a95bb56d {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1124.926863] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c178b5-dab1-2484-e5ce-62020555c18b, 'name': SearchDatastore_Task, 'duration_secs': 0.012777} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.927182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.927419] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1124.927653] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.927802] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.928345] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 
tempest-ServerActionsTestJSON-701397409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1124.930263] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aaeec173-f6e4-4f9f-a540-de579ccfdc3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.937675] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1124.937820] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1124.938558] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6525b95d-7fc4-420c-85f7-da5023c675cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.945986] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1124.945986] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5201963b-7653-e225-f8d3-29bccb5bbc1f" [ 1124.945986] env[69994]: _type = "Task" [ 1124.945986] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.954857] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5201963b-7653-e225-f8d3-29bccb5bbc1f, 'name': SearchDatastore_Task, 'duration_secs': 0.008001} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.955641] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d9d3657-36ab-4090-8582-26601aadcd99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.961948] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1124.961948] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c4b54d-e4d7-8bcc-3db3-572880d29ced" [ 1124.961948] env[69994]: _type = "Task" [ 1124.961948] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.968737] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c4b54d-e4d7-8bcc-3db3-572880d29ced, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.049054] env[69994]: DEBUG nova.scheduler.client.report [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.070304] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242590, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.236551] env[69994]: DEBUG nova.compute.manager [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Received event network-changed-ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.237536] env[69994]: DEBUG nova.compute.manager [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Refreshing instance network info cache due to event network-changed-ba2c9555-1cfb-479b-b793-f20615723d77. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1125.237536] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] Acquiring lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.237536] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] Acquired lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.237536] env[69994]: DEBUG nova.network.neutron [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Refreshing network info cache for port ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.476983] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c4b54d-e4d7-8bcc-3db3-572880d29ced, 'name': SearchDatastore_Task, 'duration_secs': 0.007866} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.476983] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.476983] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1125.476983] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9177a9fc-2865-4d07-94ba-e06c5f07a5a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.483708] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1125.483708] env[69994]: value = "task-3242591" [ 1125.483708] env[69994]: _type = "Task" [ 1125.483708] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.491831] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242591, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.556391] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.828s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.564043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.534s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.572989] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242590, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.589122] env[69994]: INFO nova.scheduler.client.report [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Deleted allocations for instance 05993c51-605c-4154-afc1-f3bc5344258c [ 1125.614680] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.614994] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.615284] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleting the datastore file [datastore1] e17fcc84-7c86-41b6-88ec-8a35619534b6 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.615643] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-556c2c9f-9600-4633-b0bd-d29af3f48b8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.623217] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1125.623217] env[69994]: value = "task-3242592" [ 1125.623217] env[69994]: _type = "Task" [ 1125.623217] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.632723] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.749886] env[69994]: DEBUG nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1125.787862] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='bba7ea9c1926c2d2c7dd952b10dbe8e2',container_format='bare',created_at=2025-04-03T08:47:20Z,direct_url=,disk_format='vmdk',id=1ab8f161-e68e-437c-a5bf-624846f8bc40,min_disk=1,min_ram=0,name='tempest-test-snap-684947002',owner='a4c158f7555d4606b641be4264d95eaa',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-04-03T08:47:32Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1125.788127] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.788292] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1125.788476] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.788626] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1125.788806] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1125.789172] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 
tempest-ImagesTestJSON-1478781762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1125.789568] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1125.789870] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1125.790092] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1125.790357] env[69994]: DEBUG nova.virt.hardware [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1125.791503] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b402c03d-246c-417d-b338-0232fa7c8518 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.800924] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1023d157-f33c-411b-b836-7858c1e91b51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.993907] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503248} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.994194] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1125.994407] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1125.994656] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c069bd0-b31f-47a5-9c10-7a3d24c1f8dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.000763] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1126.000763] env[69994]: value = "task-3242593" [ 1126.000763] env[69994]: _type = "Task" [ 1126.000763] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.009970] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.065429] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242590, 'name': ReconfigVM_Task, 'duration_secs': 1.353919} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.065694] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Reconfigured VM instance instance-00000065 to attach disk [datastore1] ca237467-eafc-4c18-a56e-98b94d111c92/ca237467-eafc-4c18-a56e-98b94d111c92.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.068869] env[69994]: INFO nova.compute.claims [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.072309] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-902732d9-21c2-4ead-98a6-a12d55b87caf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.079661] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1126.079661] env[69994]: value = "task-3242594" [ 1126.079661] env[69994]: _type = "Task" [ 1126.079661] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.084342] env[69994]: DEBUG nova.network.neutron [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updated VIF entry in instance network info cache for port ba2c9555-1cfb-479b-b793-f20615723d77. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1126.084342] env[69994]: DEBUG nova.network.neutron [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updating instance_info_cache with network_info: [{"id": "ba2c9555-1cfb-479b-b793-f20615723d77", "address": "fa:16:3e:3d:fe:51", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba2c9555-1c", "ovs_interfaceid": "ba2c9555-1cfb-479b-b793-f20615723d77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.090395] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242594, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.097468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd086d90-97fb-40b8-abdb-74329571194d tempest-ServerGroupTestJSON-1347537780 tempest-ServerGroupTestJSON-1347537780-project-member] Lock "05993c51-605c-4154-afc1-f3bc5344258c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.666s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.134972] env[69994]: DEBUG oslo_vmware.api [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348316} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.134972] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1126.134972] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1126.135182] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1126.135310] env[69994]: INFO nova.compute.manager [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Took 3.39 seconds to destroy the instance on the hypervisor. [ 1126.135563] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.135763] env[69994]: DEBUG nova.compute.manager [-] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1126.135835] env[69994]: DEBUG nova.network.neutron [-] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1126.250204] env[69994]: DEBUG nova.compute.manager [req-25e41668-e8cd-467f-8de0-cee05e681fa7 req-e1d70352-54ec-4112-90cb-52168c7ebefa service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Received event network-vif-plugged-940c4e47-4675-43d9-a93a-1e57a95bb56d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1126.250526] env[69994]: DEBUG oslo_concurrency.lockutils [req-25e41668-e8cd-467f-8de0-cee05e681fa7 req-e1d70352-54ec-4112-90cb-52168c7ebefa service nova] Acquiring lock "65facb63-1323-4905-b107-a5c5782d4a4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.250636] env[69994]: DEBUG oslo_concurrency.lockutils [req-25e41668-e8cd-467f-8de0-cee05e681fa7 req-e1d70352-54ec-4112-90cb-52168c7ebefa service nova] Lock "65facb63-1323-4905-b107-a5c5782d4a4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.250799] env[69994]: DEBUG oslo_concurrency.lockutils [req-25e41668-e8cd-467f-8de0-cee05e681fa7 req-e1d70352-54ec-4112-90cb-52168c7ebefa service nova] Lock "65facb63-1323-4905-b107-a5c5782d4a4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.250965] env[69994]: DEBUG nova.compute.manager [req-25e41668-e8cd-467f-8de0-cee05e681fa7 req-e1d70352-54ec-4112-90cb-52168c7ebefa service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] No waiting events found dispatching network-vif-plugged-940c4e47-4675-43d9-a93a-1e57a95bb56d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1126.251327] env[69994]: WARNING nova.compute.manager [req-25e41668-e8cd-467f-8de0-cee05e681fa7 req-e1d70352-54ec-4112-90cb-52168c7ebefa service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Received unexpected event network-vif-plugged-940c4e47-4675-43d9-a93a-1e57a95bb56d for instance with vm_state building and task_state spawning. [ 1126.511626] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.21851} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.511920] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1126.512772] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f2848e-cdec-4aed-b64a-0fa07dd9a28a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.516683] env[69994]: DEBUG nova.network.neutron [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Successfully updated port: 940c4e47-4675-43d9-a93a-1e57a95bb56d {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1126.541197] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.541197] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4ce9037-6a80-4bbb-8d16-15fd72c89522 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.564068] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1126.564068] env[69994]: value = "task-3242595" [ 1126.564068] env[69994]: _type = "Task" [ 1126.564068] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.574018] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242595, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.576611] env[69994]: INFO nova.compute.resource_tracker [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating resource usage from migration 26f071cf-8009-45aa-9b70-228b5d75eb73 [ 1126.589605] env[69994]: DEBUG oslo_concurrency.lockutils [req-0fbdaf3d-73c5-419d-a095-37773081d4c3 req-e30c349c-4c7e-40f6-906a-ab02ad925b1d service nova] Releasing lock "refresh_cache-be421d40-9859-4e0d-aef8-a2feb8717a78" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.590604] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242594, 'name': Rename_Task, 'duration_secs': 0.140108} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.590604] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1126.590604] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-940c48fa-7842-4e7a-9ae8-5186c8b6ea40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.597405] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1126.597405] env[69994]: value = "task-3242596" [ 1126.597405] env[69994]: _type = "Task" [ 1126.597405] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.608224] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242596, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.776056] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135132e0-21f5-4357-a875-4f0ce514bd2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.785680] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef476d49-4268-445e-9728-1838758da8c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.819858] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb88747d-514c-4483-83f5-32236873e773 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.828342] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a202a7-2a7f-464b-89ba-620f099a62b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.845368] env[69994]: DEBUG nova.compute.provider_tree [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.023688] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "refresh_cache-65facb63-1323-4905-b107-a5c5782d4a4c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.023978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "refresh_cache-65facb63-1323-4905-b107-a5c5782d4a4c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.024137] env[69994]: DEBUG nova.network.neutron [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.074758] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242595, 'name': ReconfigVM_Task, 'duration_secs': 0.300206} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.075050] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Reconfigured VM instance instance-0000005f to attach disk [datastore2] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14/e6acdc45-5e8f-4ff0-9259-3de73a6fdd14.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.075692] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e46cefb0-9449-4efb-b4c2-34312b522f7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.082088] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1127.082088] env[69994]: value = "task-3242597" [ 1127.082088] env[69994]: _type = "Task" [ 1127.082088] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.093025] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242597, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.108882] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242596, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.171365] env[69994]: DEBUG nova.network.neutron [-] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.276237] env[69994]: DEBUG nova.compute.manager [req-58112690-e279-4e5f-b5ef-dd1f4d5d64c6 req-23ae08b2-5db7-4417-b22e-8f91d2716f9d service nova] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Received event network-vif-deleted-b2521bc7-942e-4d29-bc89-0fd13a02f783 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1127.348261] env[69994]: DEBUG nova.scheduler.client.report [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.593337] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242597, 'name': Rename_Task, 'duration_secs': 0.145775} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.594996] env[69994]: DEBUG nova.network.neutron [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1127.597508] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1127.597855] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-016817c5-b377-4632-823b-c7b049a9c605 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.608947] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242596, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.609923] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1127.609923] env[69994]: value = "task-3242598" [ 1127.609923] env[69994]: _type = "Task" [ 1127.609923] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.619079] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242598, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.677381] env[69994]: INFO nova.compute.manager [-] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Took 1.54 seconds to deallocate network for instance. [ 1127.716043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.716262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.769309] env[69994]: DEBUG nova.network.neutron [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Updating instance_info_cache with network_info: [{"id": "940c4e47-4675-43d9-a93a-1e57a95bb56d", "address": "fa:16:3e:e0:e3:2a", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940c4e47-46", "ovs_interfaceid": "940c4e47-4675-43d9-a93a-1e57a95bb56d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1127.853442] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.290s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.853685] env[69994]: INFO nova.compute.manager [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Migrating [ 1128.118157] env[69994]: DEBUG oslo_vmware.api [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242596, 'name': PowerOnVM_Task, 'duration_secs': 1.083661} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.121491] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1128.121770] env[69994]: INFO nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Took 9.19 seconds to spawn the instance on the hypervisor. [ 1128.122570] env[69994]: DEBUG nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1128.122995] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1e7977-76e5-4789-afcd-5a108ee1d5e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.131423] env[69994]: DEBUG oslo_vmware.api [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242598, 'name': PowerOnVM_Task, 'duration_secs': 0.494132} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.134329] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1128.134588] env[69994]: DEBUG nova.compute.manager [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1128.135576] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0120de74-f979-41b0-8200-623eb0b6a5cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.219327] env[69994]: DEBUG nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1128.241065] env[69994]: INFO nova.compute.manager [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Took 0.56 seconds to detach 1 volumes for instance. [ 1128.271574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "refresh_cache-65facb63-1323-4905-b107-a5c5782d4a4c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.272998] env[69994]: DEBUG nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Instance network_info: |[{"id": "940c4e47-4675-43d9-a93a-1e57a95bb56d", "address": "fa:16:3e:e0:e3:2a", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940c4e47-46", "ovs_interfaceid": "940c4e47-4675-43d9-a93a-1e57a95bb56d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1128.272998] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:e3:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '940c4e47-4675-43d9-a93a-1e57a95bb56d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1128.280415] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1128.281169] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1128.281412] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a679a65-1133-4b04-89a7-887b147e48f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.300935] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1128.300935] env[69994]: value = "task-3242599" [ 1128.300935] env[69994]: _type = "Task" [ 1128.300935] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.309271] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242599, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.370809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.370809] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.370809] env[69994]: DEBUG nova.network.neutron [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1128.648251] env[69994]: INFO nova.compute.manager [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Took 27.48 seconds to build instance. [ 1128.651621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.651868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.652093] env[69994]: DEBUG nova.objects.instance [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1128.712988] env[69994]: DEBUG nova.compute.manager [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Received event network-changed-940c4e47-4675-43d9-a93a-1e57a95bb56d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1128.713146] env[69994]: DEBUG nova.compute.manager [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Refreshing instance network info cache due to event network-changed-940c4e47-4675-43d9-a93a-1e57a95bb56d. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1128.713919] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] Acquiring lock "refresh_cache-65facb63-1323-4905-b107-a5c5782d4a4c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.714141] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] Acquired lock "refresh_cache-65facb63-1323-4905-b107-a5c5782d4a4c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.714397] env[69994]: DEBUG nova.network.neutron [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Refreshing network info cache for port 940c4e47-4675-43d9-a93a-1e57a95bb56d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1128.742307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.747350] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.813198] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242599, 'name': CreateVM_Task, 'duration_secs': 0.314374} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.813434] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.814248] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.814471] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.814921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1128.815257] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b78937a-9833-4d9b-827c-e834e6b40dfe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.820066] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1128.820066] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528123cc-274f-4e6d-f7d8-488ada4e6c3c" [ 1128.820066] env[69994]: _type = "Task" [ 1128.820066] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.835910] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.836202] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Processing image 1ab8f161-e68e-437c-a5bf-624846f8bc40 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.836478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40/1ab8f161-e68e-437c-a5bf-624846f8bc40.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.836658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40/1ab8f161-e68e-437c-a5bf-624846f8bc40.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.836857] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.837111] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1c23da6-b90a-4ef8-b6f3-5992d9fe6139 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.844831] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.845016] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.845705] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3e96e9d-4b1d-42cc-abfd-c3242a043df0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.850887] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1128.850887] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ae9117-49f9-a617-98e0-44725de7bcd6" [ 1128.850887] env[69994]: _type = "Task" [ 1128.850887] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.858422] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ae9117-49f9-a617-98e0-44725de7bcd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.153288] env[69994]: DEBUG nova.network.neutron [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance_info_cache with network_info: [{"id": "63b08705-8a3c-4011-9ebc-15f8463de275", "address": "fa:16:3e:da:d4:f3", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b08705-8a", "ovs_interfaceid": "63b08705-8a3c-4011-9ebc-15f8463de275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.155175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98c2cb52-a702-4fdd-90f4-f9696880b5ca tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.996s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.360967] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 
tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1129.361286] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Fetch image to [datastore2] OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2/OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1129.361491] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Downloading stream optimized image 1ab8f161-e68e-437c-a5bf-624846f8bc40 to [datastore2] OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2/OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2.vmdk on the data store datastore2 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1129.361712] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Downloading image file data 1ab8f161-e68e-437c-a5bf-624846f8bc40 to the ESX as VM named 'OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1129.403233] env[69994]: DEBUG nova.network.neutron [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Updated VIF entry in instance network info cache for port 940c4e47-4675-43d9-a93a-1e57a95bb56d. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1129.403369] env[69994]: DEBUG nova.network.neutron [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Updating instance_info_cache with network_info: [{"id": "940c4e47-4675-43d9-a93a-1e57a95bb56d", "address": "fa:16:3e:e0:e3:2a", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940c4e47-46", "ovs_interfaceid": "940c4e47-4675-43d9-a93a-1e57a95bb56d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.434148] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1129.434148] env[69994]: value = "resgroup-9" [ 1129.434148] env[69994]: _type = "ResourcePool" [ 1129.434148] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1129.434404] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-8418828b-e38c-42db-a6d8-ed3f94ce9839 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.455057] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease: (returnval){ [ 1129.455057] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42498-44b6-1e61-d08e-df031389ede4" [ 1129.455057] env[69994]: _type = "HttpNfcLease" [ 1129.455057] env[69994]: } obtained for vApp import into resource pool (val){ [ 1129.455057] env[69994]: value = "resgroup-9" [ 1129.455057] env[69994]: _type = "ResourcePool" [ 1129.455057] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1129.455346] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the lease: (returnval){ [ 1129.455346] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42498-44b6-1e61-d08e-df031389ede4" [ 1129.455346] env[69994]: _type = "HttpNfcLease" [ 1129.455346] env[69994]: } to be ready. 
{{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1129.461368] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1129.461368] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42498-44b6-1e61-d08e-df031389ede4" [ 1129.461368] env[69994]: _type = "HttpNfcLease" [ 1129.461368] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1129.655378] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.660237] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2761ed1c-e70d-4336-b341-f217aa830abf tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.661368] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.919s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.662901] env[69994]: INFO nova.compute.claims [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1129.906403] env[69994]: DEBUG oslo_concurrency.lockutils [req-d5c9a047-3242-4140-9b52-14e08fae2b8d req-d16d4c98-60b3-45da-be0a-cea59f4bb07b service nova] Releasing lock "refresh_cache-65facb63-1323-4905-b107-a5c5782d4a4c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.963560] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1129.963560] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42498-44b6-1e61-d08e-df031389ede4" [ 1129.963560] env[69994]: _type = "HttpNfcLease" [ 1129.963560] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1130.145646] env[69994]: DEBUG nova.compute.manager [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1130.465504] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1130.465504] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42498-44b6-1e61-d08e-df031389ede4" [ 1130.465504] env[69994]: _type = "HttpNfcLease" [ 1130.465504] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1130.668312] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.840187] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9590ab43-ee56-4b8c-a4e2-006614d64ae0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.850018] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3266fa-f447-4670-8bf8-81b0f5685229 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.882660] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab850383-d21c-4ead-bc72-a5965b48641b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.890685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0682f7fe-c299-453a-ba37-b715cede8d86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.905927] env[69994]: DEBUG nova.compute.provider_tree [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.964601] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1130.964601] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42498-44b6-1e61-d08e-df031389ede4" [ 1130.964601] env[69994]: _type = "HttpNfcLease" [ 1130.964601] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1130.964996] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1130.964996] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d42498-44b6-1e61-d08e-df031389ede4" [ 1130.964996] env[69994]: _type = "HttpNfcLease" [ 1130.964996] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1130.965805] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c7562f-dd2c-4746-8e32-d3a6c14ab23b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.972963] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e3cf3-9159-3f53-5c1b-9d1d01130632/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1130.973094] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e3cf3-9159-3f53-5c1b-9d1d01130632/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1131.038266] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3fb1b6b7-4df6-4787-8a7b-f4c159817ff9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.168535] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0667da08-1932-4882-9df6-8d6dde3654f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.189470] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance 'd8d2958c-e44c-4796-becc-c572057f7ba5' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1131.409622] env[69994]: DEBUG nova.scheduler.client.report [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.695665] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1131.697417] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8a8c830-fbd0-4a42-b1de-9eb3f91d4654 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.705403] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1131.705403] env[69994]: value = "task-3242601" [ 1131.705403] env[69994]: _type = "Task" [ 1131.705403] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.714613] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242601, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.916130] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.917512] env[69994]: DEBUG nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1131.921050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.174s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.921340] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.924270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.256s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.947463] env[69994]: INFO nova.scheduler.client.report [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted allocations for instance e17fcc84-7c86-41b6-88ec-8a35619534b6 [ 1132.126990] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1132.127279] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e3cf3-9159-3f53-5c1b-9d1d01130632/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1132.128358] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73164558-34d2-448d-a04c-3db6ce213ecd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.136058] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e3cf3-9159-3f53-5c1b-9d1d01130632/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1132.136329] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e3cf3-9159-3f53-5c1b-9d1d01130632/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1132.136606] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9aa96a52-8bd2-402e-9ad9-1d81ed146175 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.216752] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242601, 'name': PowerOffVM_Task, 'duration_secs': 0.273064} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.217014] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.217207] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance 'd8d2958c-e44c-4796-becc-c572057f7ba5' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1132.428676] env[69994]: DEBUG nova.compute.utils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1132.429709] env[69994]: DEBUG nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1132.429882] env[69994]: DEBUG nova.network.neutron [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1132.433947] env[69994]: INFO nova.compute.claims [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1132.453987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e1468f5f-9aec-43de-8d4f-b5ee8549daf0 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "e17fcc84-7c86-41b6-88ec-8a35619534b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.221s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.469934] env[69994]: DEBUG nova.policy [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98f7c85a23ae4567befac26d062aeeab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '352ad5b68db1480eb657935e006d7dbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1132.588785] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523e3cf3-9159-3f53-5c1b-9d1d01130632/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1132.589028] env[69994]: INFO nova.virt.vmwareapi.images [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Downloaded image file data 1ab8f161-e68e-437c-a5bf-624846f8bc40 [ 1132.589925] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2a9cd0-2f5b-4d5f-8fb0-9c1231bcca27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.605557] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9c87f4d-b4be-4986-b1c6-85fea39576eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.705059] env[69994]: INFO nova.virt.vmwareapi.images [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] The imported VM was unregistered [ 1132.707386] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1132.707894] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating directory with path [datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.707894] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec663d32-4110-43b4-b092-922fdfaae510 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.723730] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1132.723981] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1132.724195] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed 
tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1132.724404] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1132.725319] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1132.725319] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1132.725319] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1132.725319] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1132.725319] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1132.725666] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1132.725666] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1132.731951] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ea6b3d1-0500-4eed-8c61-b1700fd745e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.742112] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created directory with path [datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40 
{{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.743033] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2/OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2.vmdk to [datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40/1ab8f161-e68e-437c-a5bf-624846f8bc40.vmdk. {{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1132.743033] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f7c9695f-8221-4ae8-b137-d50364baf45f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.751029] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1132.751029] env[69994]: value = "task-3242603" [ 1132.751029] env[69994]: _type = "Task" [ 1132.751029] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.751635] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1132.751635] env[69994]: value = "task-3242604" [ 1132.751635] env[69994]: _type = "Task" [ 1132.751635] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.753299] env[69994]: DEBUG nova.network.neutron [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Successfully created port: fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1132.770174] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242603, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.773322] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242604, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.932785] env[69994]: DEBUG nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1132.939399] env[69994]: INFO nova.compute.resource_tracker [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating resource usage from migration 3ee157d3-094d-4414-983a-239b7b1ea87f [ 1133.098556] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60f10fa-25d5-41cf-adc0-ad1c0430cf85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.109034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2302d2cc-5a6c-467b-bc52-ab1784096116 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.144121] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee79e92-606a-4e1e-830c-eed82610e105 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.153884] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961b4785-8a6a-4350-9999-14e93ed28ecd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.171030] env[69994]: DEBUG nova.compute.provider_tree [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.261351] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242603, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.271247] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242604, 'name': ReconfigVM_Task, 'duration_secs': 0.203191} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.271569] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance 'd8d2958c-e44c-4796-becc-c572057f7ba5' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1133.674741] env[69994]: DEBUG nova.scheduler.client.report [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.765207] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242603, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.778620] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1133.778826] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.778929] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.779076] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.779228] env[69994]: DEBUG nova.virt.hardware [None 
req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.779373] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1133.779610] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1133.779782] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1133.779947] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1133.780123] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1133.780296] env[69994]: DEBUG nova.virt.hardware [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1133.785785] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Reconfiguring VM instance instance-00000063 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1133.786105] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffe7f54f-40fe-42ca-9252-3659bad92339 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.809281] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1133.809281] env[69994]: value = "task-3242605" [ 1133.809281] env[69994]: _type = "Task" [ 1133.809281] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.825131] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242605, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.944064] env[69994]: DEBUG nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1133.966743] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1133.966998] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.967208] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.967414] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.967597] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.967764] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1133.967992] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1133.968191] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1133.968383] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1133.968551] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1133.968723] env[69994]: DEBUG nova.virt.hardware [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1133.969682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374b495a-b84e-4f2e-aff3-ff29271dafc9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.980133] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19116bb-6978-44f2-870f-b3f5ddf18fdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.151794] env[69994]: DEBUG nova.compute.manager [req-77a0deb3-10ec-455e-8d5f-7ac75ab62fdb req-f3ffdd87-bd0f-429a-8073-99b952d0eecd service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-vif-plugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1134.152271] env[69994]: DEBUG oslo_concurrency.lockutils [req-77a0deb3-10ec-455e-8d5f-7ac75ab62fdb req-f3ffdd87-bd0f-429a-8073-99b952d0eecd service nova] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.152517] env[69994]: DEBUG oslo_concurrency.lockutils [req-77a0deb3-10ec-455e-8d5f-7ac75ab62fdb req-f3ffdd87-bd0f-429a-8073-99b952d0eecd service nova] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.152715] env[69994]: DEBUG oslo_concurrency.lockutils [req-77a0deb3-10ec-455e-8d5f-7ac75ab62fdb req-f3ffdd87-bd0f-429a-8073-99b952d0eecd service nova] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.152889] env[69994]: DEBUG nova.compute.manager [req-77a0deb3-10ec-455e-8d5f-7ac75ab62fdb req-f3ffdd87-bd0f-429a-8073-99b952d0eecd service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] No waiting events found dispatching network-vif-plugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1134.153078] env[69994]: WARNING nova.compute.manager [req-77a0deb3-10ec-455e-8d5f-7ac75ab62fdb req-f3ffdd87-bd0f-429a-8073-99b952d0eecd service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received unexpected event network-vif-plugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d for instance with vm_state building and task_state spawning. [ 1134.180978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.257s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.181219] env[69994]: INFO nova.compute.manager [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Migrating [ 1134.213432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.213675] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.266872] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242603, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.322983] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.323960] env[69994]: DEBUG nova.network.neutron [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Successfully updated port: fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1134.696529] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.697389] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.697389] env[69994]: DEBUG nova.network.neutron [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.715946] env[69994]: DEBUG nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1134.767901] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242603, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.823781] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242605, 'name': ReconfigVM_Task, 'duration_secs': 0.592215} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.824138] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Reconfigured VM instance instance-00000063 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1134.824976] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13473097-49cb-4695-a445-95ad23387693 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.829808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.829968] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.830138] env[69994]: DEBUG nova.network.neutron [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.851215] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] d8d2958c-e44c-4796-becc-c572057f7ba5/d8d2958c-e44c-4796-becc-c572057f7ba5.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.852335] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6461920-69fa-4e86-85b6-71594debf138 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.874744] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1134.874744] env[69994]: value = "task-3242606" [ 1134.874744] env[69994]: _type = "Task" [ 1134.874744] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.886023] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242606, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.240621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.240936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.242809] env[69994]: INFO nova.compute.claims [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.263522] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242603, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.496712} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.263805] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2/OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2.vmdk to [datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40/1ab8f161-e68e-437c-a5bf-624846f8bc40.vmdk. [ 1135.263998] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Cleaning up location [datastore2] OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1135.264177] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_30835eb1-5d5a-49ee-b377-91646efdf1f2 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1135.264433] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32df2c19-7bff-4ec8-ad97-ead2559531f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.270538] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1135.270538] env[69994]: value = "task-3242607" [ 1135.270538] env[69994]: _type = "Task" [ 1135.270538] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.278395] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242607, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.367308] env[69994]: DEBUG nova.network.neutron [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1135.386493] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242606, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.445877] env[69994]: DEBUG nova.network.neutron [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance_info_cache with network_info: [{"id": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "address": "fa:16:3e:81:39:4b", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53c19bc-a6", "ovs_interfaceid": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.508921] env[69994]: DEBUG nova.network.neutron [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc354355-eb", "ovs_interfaceid": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.780726] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242607, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035434} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.781103] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1135.781103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40/1ab8f161-e68e-437c-a5bf-624846f8bc40.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.781255] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40/1ab8f161-e68e-437c-a5bf-624846f8bc40.vmdk to [datastore2] 65facb63-1323-4905-b107-a5c5782d4a4c/65facb63-1323-4905-b107-a5c5782d4a4c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1135.781519] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58e12b55-cabf-45c7-917d-4fd57ca12d11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.788080] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1135.788080] env[69994]: value = "task-3242608" [ 1135.788080] env[69994]: _type = "Task" [ 1135.788080] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.797852] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242608, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.884741] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242606, 'name': ReconfigVM_Task, 'duration_secs': 0.690024} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.885097] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Reconfigured VM instance instance-00000063 to attach disk [datastore2] d8d2958c-e44c-4796-becc-c572057f7ba5/d8d2958c-e44c-4796-becc-c572057f7ba5.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.885321] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance 'd8d2958c-e44c-4796-becc-c572057f7ba5' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.948685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.011409] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.011723] env[69994]: DEBUG nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Instance network_info: |[{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc354355-eb", "ovs_interfaceid": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1136.012194] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:d6:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc354355-eb77-47cd-9f5b-89c8e6616b1d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1136.020163] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating folder: Project (352ad5b68db1480eb657935e006d7dbb). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1136.020465] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c4f8511-f97b-4c4a-b20f-248bb54ee25f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.034598] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created folder: Project (352ad5b68db1480eb657935e006d7dbb) in parent group-v647729. [ 1136.034815] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating folder: Instances. Parent ref: group-v648020. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1136.035073] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa5ef658-1d12-4b46-a9af-4102b286caf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.047635] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created folder: Instances in parent group-v648020. [ 1136.047883] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1136.048097] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1136.048377] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3391cc51-27e2-438e-ae50-639a43da81d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.068906] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1136.068906] env[69994]: value = "task-3242611" [ 1136.068906] env[69994]: _type = "Task" [ 1136.068906] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.077197] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242611, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.180953] env[69994]: DEBUG nova.compute.manager [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1136.181170] env[69994]: DEBUG nova.compute.manager [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing instance network info cache due to event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1136.181438] env[69994]: DEBUG oslo_concurrency.lockutils [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] Acquiring lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.181583] env[69994]: DEBUG oslo_concurrency.lockutils [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] Acquired lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.181768] env[69994]: DEBUG nova.network.neutron [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1136.300020] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242608, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.395223] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97543f9d-803c-4db6-85bb-bafff51376e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.420442] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20832ba-6279-427f-aa36-4327f862c56a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.441813] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance 'd8d2958c-e44c-4796-becc-c572057f7ba5' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.460905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f50042a-10f1-4afb-b3e0-8fe058f4ea55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.471406] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93117d8c-cb29-4a1c-a5b0-6ac87876cd79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.511143] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02a5cd9-a5be-4095-9538-f1ecd9a74aa4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.521917] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4fa7b9-9a9e-492d-a882-490fcb632f22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.539114] env[69994]: DEBUG nova.compute.provider_tree [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.580372] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242611, 'name': CreateVM_Task, 'duration_secs': 0.363931} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.580591] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.581311] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.581478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.581852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.582469] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4502629-4c82-4ce9-a4a1-a9f28b46cfc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.589333] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1136.589333] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e96f1f-7c7c-ffbf-6141-3284d48f3b12" [ 1136.589333] env[69994]: _type = "Task" [ 1136.589333] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.599133] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e96f1f-7c7c-ffbf-6141-3284d48f3b12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.803932] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242608, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.889412] env[69994]: DEBUG nova.network.neutron [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updated VIF entry in instance network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1136.889853] env[69994]: DEBUG nova.network.neutron [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc354355-eb", "ovs_interfaceid": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.984729] env[69994]: DEBUG nova.network.neutron [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Port 63b08705-8a3c-4011-9ebc-15f8463de275 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1137.042477] env[69994]: DEBUG nova.scheduler.client.report [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.103406] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e96f1f-7c7c-ffbf-6141-3284d48f3b12, 'name': SearchDatastore_Task, 'duration_secs': 0.013505} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.103711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.103956] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1137.104219] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.104372] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.104893] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.104893] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3390516-3d19-4dab-a92c-41e2f2928401 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.116754] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.117110] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1137.118102] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cb9ac42-28a5-46a5-aaae-bbed9941d760 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.126927] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1137.126927] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529e2a13-2d45-c635-ef03-fb62cb60c57c" [ 1137.126927] env[69994]: _type = "Task" [ 1137.126927] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.137190] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529e2a13-2d45-c635-ef03-fb62cb60c57c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.305072] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242608, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.394790] env[69994]: DEBUG oslo_concurrency.lockutils [req-4854bd54-c352-4cfe-b102-138ebe654a68 req-d1e85fa9-067a-4809-af4f-4011baa9e3c3 service nova] Releasing lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.464179] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318c0435-8465-4398-a2d4-0c18bdfb4f7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.484824] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance 'ca237467-eafc-4c18-a56e-98b94d111c92' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1137.547972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.307s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.548566] env[69994]: DEBUG nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.638867] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529e2a13-2d45-c635-ef03-fb62cb60c57c, 'name': SearchDatastore_Task, 'duration_secs': 0.012559} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.639707] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd749e2-2027-4f35-96f3-a712bad7db82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.649584] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1137.649584] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52059cb9-ac2c-7585-fd6b-6ac5df68c9c6" [ 1137.649584] env[69994]: _type = "Task" [ 1137.649584] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.659989] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52059cb9-ac2c-7585-fd6b-6ac5df68c9c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.801024] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242608, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.993949] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1138.001804] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f05cbdf1-e7a0-4b63-b9dc-900996dc7dd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.011063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.011063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.011063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.018787] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1138.018787] env[69994]: value = "task-3242612" [ 1138.018787] env[69994]: _type = "Task" [ 1138.018787] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.029146] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242612, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.053854] env[69994]: DEBUG nova.compute.utils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1138.055630] env[69994]: DEBUG nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1138.055807] env[69994]: DEBUG nova.network.neutron [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1138.093285] env[69994]: DEBUG nova.policy [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f2b4659f30f4b9db4627d3d3abb6ba5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '605d72502cc644bfa4d875bf348246de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1138.161291] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52059cb9-ac2c-7585-fd6b-6ac5df68c9c6, 'name': SearchDatastore_Task, 'duration_secs': 0.01541} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.161540] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.161822] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1138.162087] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b723a2e-8872-40f7-99ed-93050802a396 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.168167] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1138.168167] env[69994]: value = "task-3242613" [ 1138.168167] env[69994]: _type = "Task" [ 1138.168167] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.176787] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.299354] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242608, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.238281} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.299661] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/1ab8f161-e68e-437c-a5bf-624846f8bc40/1ab8f161-e68e-437c-a5bf-624846f8bc40.vmdk to [datastore2] 65facb63-1323-4905-b107-a5c5782d4a4c/65facb63-1323-4905-b107-a5c5782d4a4c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1138.300430] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b16f4e0-ba5c-4d7c-91d7-b7ee3042fbf6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.322881] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 65facb63-1323-4905-b107-a5c5782d4a4c/65facb63-1323-4905-b107-a5c5782d4a4c.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.323226] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-892d4973-2d9b-4942-9582-6e87f1253078 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.344182] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1138.344182] env[69994]: value = "task-3242614" [ 1138.344182] env[69994]: _type = "Task" [ 1138.344182] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.353435] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242614, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.468844] env[69994]: DEBUG nova.network.neutron [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Successfully created port: 9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1138.529184] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242612, 'name': PowerOffVM_Task, 'duration_secs': 0.214662} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.531228] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1138.531228] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance 'ca237467-eafc-4c18-a56e-98b94d111c92' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1138.559266] env[69994]: DEBUG nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1138.679440] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438854} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.679772] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1138.680040] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1138.680312] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5eff64e-ff6d-4e08-bcc1-8b7d7a782ae4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.686751] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1138.686751] env[69994]: value = "task-3242615" [ 1138.686751] env[69994]: _type = "Task" [ 1138.686751] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.694300] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242615, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.854150] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242614, 'name': ReconfigVM_Task, 'duration_secs': 0.47073} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.854564] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 65facb63-1323-4905-b107-a5c5782d4a4c/65facb63-1323-4905-b107-a5c5782d4a4c.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1138.855087] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08d4f2b6-613d-4780-83f5-a835b93aff5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.861559] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1138.861559] env[69994]: value = "task-3242616" [ 1138.861559] env[69994]: _type = "Task" [ 1138.861559] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.869357] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242616, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.037890] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.038228] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.038345] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.038528] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.038674] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.038821] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.039031] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.039195] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.039359] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies 
{{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.039518] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.039759] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.045228] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65a75b63-cae8-423e-aec5-33c2d3dc509f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.057741] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.057907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.058098] env[69994]: DEBUG nova.network.neutron [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1139.070020] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1139.070020] env[69994]: value = "task-3242617" [ 1139.070020] env[69994]: _type = "Task" [ 1139.070020] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.078931] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242617, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.196451] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242615, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069436} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.197501] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.197771] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2020791-df39-4751-bb94-8a4f992beb04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.220631] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.220855] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a043d7e7-943f-418d-9609-b0aeabd67221 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.243023] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1139.243023] env[69994]: value = "task-3242618" [ 1139.243023] env[69994]: _type = "Task" [ 1139.243023] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.253282] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242618, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.372197] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242616, 'name': Rename_Task, 'duration_secs': 0.144854} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.372539] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1139.372871] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1aad6fd-1424-40c4-9544-c566bb86daf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.379374] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1139.379374] env[69994]: value = "task-3242619" [ 1139.379374] env[69994]: _type = "Task" [ 1139.379374] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.392902] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242619, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.570948] env[69994]: DEBUG nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.582066] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242617, 'name': ReconfigVM_Task, 'duration_secs': 0.491124} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.582432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance 'ca237467-eafc-4c18-a56e-98b94d111c92' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1139.596152] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.596401] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.596561] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.596744] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.596918] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.597064] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.597276] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.597434] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.597603] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.597766] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.597941] env[69994]: DEBUG nova.virt.hardware [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.598798] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8bde80-2752-44a9-84f3-c6e0db975d85 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.606119] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6559aef6-fd31-4cd2-bc9c-a902fe09332a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.749396] env[69994]: DEBUG nova.network.neutron [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance_info_cache with network_info: [{"id": "63b08705-8a3c-4011-9ebc-15f8463de275", "address": "fa:16:3e:da:d4:f3", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b08705-8a", "ovs_interfaceid": "63b08705-8a3c-4011-9ebc-15f8463de275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1139.753863] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242618, 'name': ReconfigVM_Task, 'duration_secs': 0.325228} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.754346] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1139.755008] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1846c137-6581-49fb-8496-46a11b2ec629 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.763699] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1139.763699] env[69994]: value = "task-3242620" [ 1139.763699] env[69994]: _type = "Task" [ 1139.763699] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.775014] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242620, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.846665] env[69994]: DEBUG nova.compute.manager [req-f77a49e6-ec5a-411b-880d-d198c639e4e5 req-3b6d4347-9758-4b50-a258-bb3c086eed49 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-vif-plugged-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1139.846887] env[69994]: DEBUG oslo_concurrency.lockutils [req-f77a49e6-ec5a-411b-880d-d198c639e4e5 req-3b6d4347-9758-4b50-a258-bb3c086eed49 service nova] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.847107] env[69994]: DEBUG oslo_concurrency.lockutils [req-f77a49e6-ec5a-411b-880d-d198c639e4e5 req-3b6d4347-9758-4b50-a258-bb3c086eed49 service nova] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.847371] env[69994]: DEBUG oslo_concurrency.lockutils [req-f77a49e6-ec5a-411b-880d-d198c639e4e5 req-3b6d4347-9758-4b50-a258-bb3c086eed49 service nova] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.847449] env[69994]: DEBUG nova.compute.manager [req-f77a49e6-ec5a-411b-880d-d198c639e4e5 req-3b6d4347-9758-4b50-a258-bb3c086eed49 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] No waiting events found dispatching network-vif-plugged-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1139.847593] env[69994]: WARNING nova.compute.manager [req-f77a49e6-ec5a-411b-880d-d198c639e4e5 req-3b6d4347-9758-4b50-a258-bb3c086eed49 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received unexpected event network-vif-plugged-9e330706-3213-4a99-b48a-d2e09db34453 for instance with vm_state building and task_state spawning. [ 1139.889211] env[69994]: DEBUG oslo_vmware.api [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242619, 'name': PowerOnVM_Task, 'duration_secs': 0.46449} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.889477] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1139.889725] env[69994]: INFO nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Took 14.14 seconds to spawn the instance on the hypervisor. 
[ 1139.889925] env[69994]: DEBUG nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.890755] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0681cd5a-8063-4b1e-aa33-4a4e88655eaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.933524] env[69994]: DEBUG nova.network.neutron [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Successfully updated port: 9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.089456] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1140.089853] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1140.089895] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1140.090159] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1140.090350] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1140.090546] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1140.090783] env[69994]: DEBUG nova.virt.hardware [None 
req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1140.090975] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1140.091192] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1140.091486] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1140.091743] env[69994]: DEBUG nova.virt.hardware [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1140.097210] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1140.097493] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-665d8adf-d714-443f-8d61-1e50f7291a12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.116267] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1140.116267] env[69994]: value = "task-3242621" [ 1140.116267] env[69994]: _type = "Task" [ 1140.116267] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.123843] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242621, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.255434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.272736] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242620, 'name': Rename_Task, 'duration_secs': 0.150475} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.272983] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1140.273227] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ec9f2f3-b38a-4db7-9730-2f16dfb0446a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.279418] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1140.279418] env[69994]: value = "task-3242622" [ 1140.279418] env[69994]: _type = "Task" [ 1140.279418] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.286544] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.413831] env[69994]: INFO nova.compute.manager [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Took 22.79 seconds to build instance. 
[ 1140.437307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.437307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.437307] env[69994]: DEBUG nova.network.neutron [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.626495] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242621, 'name': ReconfigVM_Task, 'duration_secs': 0.162202} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.626852] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1140.627648] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69acf9e-6c88-4e31-8793-1e06ab193691 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.649795] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] ca237467-eafc-4c18-a56e-98b94d111c92/ca237467-eafc-4c18-a56e-98b94d111c92.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.650069] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-550f5732-f975-4923-91d6-263bea4bcb40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.669943] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1140.669943] env[69994]: value = "task-3242623" [ 1140.669943] env[69994]: _type = "Task" [ 1140.669943] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.677856] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242623, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.779822] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757c0d9d-c8b4-464b-af37-08e7f9193295 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.790491] env[69994]: DEBUG oslo_vmware.api [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242622, 'name': PowerOnVM_Task, 'duration_secs': 0.427525} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.804648] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1140.804855] env[69994]: INFO nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Took 6.86 seconds to spawn the instance on the hypervisor. 
[ 1140.805050] env[69994]: DEBUG nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.805881] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25254b8a-0a21-4d50-bf63-e5c364ba1b0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.808598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7fc652-283f-47d2-adb9-7435dee79001 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.819655] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance 'd8d2958c-e44c-4796-becc-c572057f7ba5' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1140.916169] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6b184e35-906d-4de2-93e0-af93849ca2d7 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "65facb63-1323-4905-b107-a5c5782d4a4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.300s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.967402] env[69994]: DEBUG nova.network.neutron [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1141.098637] env[69994]: DEBUG nova.network.neutron [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e330706-32", "ovs_interfaceid": "9e330706-3213-4a99-b48a-d2e09db34453", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.146340] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.146566] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.146753] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.179317] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242623, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.329687] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1141.331630] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2c06c86-d9bd-4ff9-968f-229198fc103f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.333774] env[69994]: INFO nova.compute.manager [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Took 12.61 seconds to build instance. [ 1141.340128] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1141.340128] env[69994]: value = "task-3242624" [ 1141.340128] env[69994]: _type = "Task" [ 1141.340128] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.348331] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242624, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.492493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "65facb63-1323-4905-b107-a5c5782d4a4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.492750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "65facb63-1323-4905-b107-a5c5782d4a4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.492971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "65facb63-1323-4905-b107-a5c5782d4a4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.493174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "65facb63-1323-4905-b107-a5c5782d4a4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.493343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "65facb63-1323-4905-b107-a5c5782d4a4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.495499] env[69994]: INFO nova.compute.manager [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Terminating instance [ 1141.605018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.605018] env[69994]: DEBUG nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Instance network_info: |[{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e330706-32", "ovs_interfaceid": "9e330706-3213-4a99-b48a-d2e09db34453", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1141.605018] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:d4:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52e117d3-d120-42c6-8e72-70085845acbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e330706-3213-4a99-b48a-d2e09db34453', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.610717] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.611105] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.611435] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b670352-4aa1-429e-a337-7b7df2b73e27 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.632138] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.632138] env[69994]: value = "task-3242625" [ 1141.632138] env[69994]: _type = "Task" [ 1141.632138] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.640976] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242625, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.679246] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242623, 'name': ReconfigVM_Task, 'duration_secs': 0.846988} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.683023] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Reconfigured VM instance instance-00000065 to attach disk [datastore1] ca237467-eafc-4c18-a56e-98b94d111c92/ca237467-eafc-4c18-a56e-98b94d111c92.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1141.683023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance 'ca237467-eafc-4c18-a56e-98b94d111c92' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1141.836336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-83a78fff-aa7b-4634-ac44-7777fdf50ddc tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.120s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.849811] env[69994]: DEBUG oslo_vmware.api [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242624, 'name': PowerOnVM_Task, 'duration_secs': 0.413368} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.853017] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.853017] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cc4ecfeb-b6f7-424c-a169-e3e428bcd5ed tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance 'd8d2958c-e44c-4796-becc-c572057f7ba5' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1141.879349] env[69994]: DEBUG nova.compute.manager [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-changed-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.879548] env[69994]: DEBUG nova.compute.manager [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing instance network info cache due to event network-changed-9e330706-3213-4a99-b48a-d2e09db34453. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1141.879770] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] Acquiring lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.879904] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] Acquired lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.880077] env[69994]: DEBUG nova.network.neutron [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1142.000874] env[69994]: DEBUG nova.compute.manager [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.000874] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.003101] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4641d0b3-2853-450b-8e5a-721ada6454f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.009230] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.009464] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d0010c4-22f3-4a16-8648-1ba460119ea9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.015745] env[69994]: DEBUG oslo_vmware.api [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1142.015745] env[69994]: value = "task-3242626" [ 1142.015745] env[69994]: _type = "Task" [ 1142.015745] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.024241] env[69994]: DEBUG oslo_vmware.api [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.142937] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242625, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.188445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b300852e-c51b-4b11-9243-b7b9fcd6aeed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.212010] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c3880d-05ac-4cbc-89f6-5fd342475686 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.231922] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance 'ca237467-eafc-4c18-a56e-98b94d111c92' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1142.524978] env[69994]: DEBUG oslo_vmware.api [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242626, 'name': PowerOffVM_Task, 'duration_secs': 0.222372} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.525237] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1142.525417] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.525657] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecf50541-9001-4524-9e5f-2e91eece0b25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.566996] env[69994]: DEBUG nova.network.neutron [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updated VIF entry in instance network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1142.567171] env[69994]: DEBUG nova.network.neutron [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e330706-32", "ovs_interfaceid": "9e330706-3213-4a99-b48a-d2e09db34453", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.586032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1142.586244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1142.586431] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleting the datastore file [datastore2] 65facb63-1323-4905-b107-a5c5782d4a4c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.586930] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee424ea7-2f9c-450f-9927-5a05289aca66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.593358] env[69994]: DEBUG oslo_vmware.api [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1142.593358] env[69994]: value = "task-3242628" [ 1142.593358] env[69994]: _type = "Task" [ 1142.593358] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.601300] env[69994]: DEBUG oslo_vmware.api [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242628, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.642420] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242625, 'name': CreateVM_Task, 'duration_secs': 0.524762} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.642587] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.643248] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.643411] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.643731] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1142.643967] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-625e62c7-f52a-409b-a22b-a1dd0283565c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.648233] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1142.648233] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52afd620-840c-f957-9245-24d5ba41903f" [ 1142.648233] env[69994]: _type = "Task" [ 1142.648233] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.656919] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52afd620-840c-f957-9245-24d5ba41903f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.769735] env[69994]: DEBUG nova.network.neutron [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Port c53c19bc-a6d9-4b00-907a-97b4755bb119 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1143.070073] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] Releasing lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.070367] env[69994]: DEBUG nova.compute.manager [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.070539] env[69994]: DEBUG nova.compute.manager [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing instance network info cache due to event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1143.070755] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] Acquiring lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.070896] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] Acquired lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.071065] env[69994]: DEBUG nova.network.neutron [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1143.103830] env[69994]: DEBUG oslo_vmware.api [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131198} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.104072] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1143.104262] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1143.104441] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1143.104606] env[69994]: INFO nova.compute.manager [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1143.104834] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.105037] env[69994]: DEBUG nova.compute.manager [-] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1143.105133] env[69994]: DEBUG nova.network.neutron [-] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1143.160032] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52afd620-840c-f957-9245-24d5ba41903f, 'name': SearchDatastore_Task, 'duration_secs': 0.009163} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.160324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.161997] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.161997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.161997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.161997] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.161997] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97e5ada4-ea01-44af-8111-a1800cc9123a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.169893] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.170013] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.170678] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26480851-c3dd-479f-bf0b-97a2df84a446 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.175966] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1143.175966] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e8b78a-995a-f9df-7e8f-8c3241ecc67d" [ 1143.175966] env[69994]: _type = "Task" [ 1143.175966] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.183200] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e8b78a-995a-f9df-7e8f-8c3241ecc67d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.648021] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.648279] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.686037] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e8b78a-995a-f9df-7e8f-8c3241ecc67d, 'name': SearchDatastore_Task, 'duration_secs': 0.01075} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.688926] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06899551-133e-46b8-ab3b-21c17264445d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.694625] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1143.694625] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52df6bad-7e62-75ea-9274-6883359b141e" [ 1143.694625] env[69994]: _type = "Task" [ 1143.694625] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.702571] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52df6bad-7e62-75ea-9274-6883359b141e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.764717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "d8d2958c-e44c-4796-becc-c572057f7ba5" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.764978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.765172] env[69994]: DEBUG nova.compute.manager [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Going to confirm migration 6 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1143.790678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.790896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.791083] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.796535] env[69994]: DEBUG nova.network.neutron [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updated VIF entry in instance network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1143.796869] env[69994]: DEBUG nova.network.neutron [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc354355-eb", "ovs_interfaceid": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.858149] env[69994]: DEBUG nova.network.neutron [-] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.908396] env[69994]: DEBUG nova.compute.manager [req-3907abb1-0a0a-4bb2-955c-41eaf824fc9e req-32073e8c-3733-429e-82f7-9ce124ee459d service nova] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Received event network-vif-deleted-940c4e47-4675-43d9-a93a-1e57a95bb56d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.146729] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.146729] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.205387] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52df6bad-7e62-75ea-9274-6883359b141e, 'name': SearchDatastore_Task, 'duration_secs': 0.009665} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.205692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.206021] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1144.206313] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d88edc4-28dd-4521-a181-cada578db9bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.218700] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1144.218700] env[69994]: value = "task-3242629" [ 1144.218700] env[69994]: _type = "Task" [ 1144.218700] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.226397] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242629, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.299587] env[69994]: DEBUG oslo_concurrency.lockutils [req-1bc5787f-ff74-4817-9820-d8c53827caf0 req-c1605a27-0b2d-4822-bd6f-a1cbdefd5ab9 service nova] Releasing lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.328556] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.328796] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.329042] env[69994]: DEBUG nova.network.neutron [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.329273] env[69994]: DEBUG nova.objects.instance [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lazy-loading 'info_cache' on Instance uuid d8d2958c-e44c-4796-becc-c572057f7ba5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.360231] env[69994]: INFO nova.compute.manager [-] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Took 1.25 seconds to deallocate network for instance. 
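The recurring triplets of "Acquiring lock ... by ...", "acquired ... waited N.NNNs" and "released ... held N.NNNs" are emitted by oslo.concurrency's lockutils wrapper around whichever critical section is named in the message (build_and_run_instance, terminate_instance, the resource tracker, the per-instance refresh_cache sections, and so on). As a rough, hedged sketch of how such entries are produced, with a placeholder function body and lock names copied from the entries above:

# Illustrative only: the lock names match the log, the bodies do not
# correspond to the real Nova code paths.
from oslo_concurrency import lockutils

# Decorator form: entering the function logs "acquired ... waited", leaving
# it logs "released ... held", as in the compute_resources entries above.
@lockutils.synchronized('compute_resources')
def update_resource_tracker():
    pass  # placeholder body

# Context-manager form, as used around the per-instance network cache work:
with lockutils.lock('refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80'):
    pass  # placeholder body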
[ 1144.649842] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.649901] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.650170] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.650372] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1144.651999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842970b4-8fec-45ed-8603-ebb10ff88624 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.661130] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327a854c-927f-4a57-81ac-8eff9043cd3b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.675118] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c040eb36-9518-4c00-989c-0b0cd60f8fd0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.681795] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3c09f9-8f1a-450a-a2c6-9b649b79b316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.711785] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179564MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1144.711972] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.712166] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.727146] env[69994]: DEBUG 
oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242629, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46301} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.727401] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1144.727726] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1144.727845] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e5e7420-ea8a-4ab5-b2be-5712546f2f7b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.733407] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1144.733407] env[69994]: value = "task-3242630" [ 1144.733407] env[69994]: _type = "Task" [ 1144.733407] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.740369] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242630, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.823633] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.823826] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.824014] env[69994]: DEBUG nova.network.neutron [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.867081] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.244251] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242630, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062354} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.244586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1145.245249] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7882bca4-55bb-4728-8bf8-3f6afab72c5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.266898] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.267138] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13a79346-9242-4d83-b5fd-61fbe33d3dd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.285927] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1145.285927] env[69994]: value = "task-3242631" [ 1145.285927] env[69994]: _type = "Task" [ 1145.285927] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.293130] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242631, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.503929] env[69994]: DEBUG nova.network.neutron [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance_info_cache with network_info: [{"id": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "address": "fa:16:3e:81:39:4b", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53c19bc-a6", "ovs_interfaceid": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.523092] env[69994]: DEBUG nova.network.neutron [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance_info_cache with network_info: [{"id": "63b08705-8a3c-4011-9ebc-15f8463de275", "address": "fa:16:3e:da:d4:f3", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b08705-8a", "ovs_interfaceid": "63b08705-8a3c-4011-9ebc-15f8463de275", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.722831] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Applying migration context for instance ca237467-eafc-4c18-a56e-98b94d111c92 as it has an incoming, in-progress migration 3ee157d3-094d-4414-983a-239b7b1ea87f. 
Migration status is post-migrating {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1145.723167] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Applying migration context for instance d8d2958c-e44c-4796-becc-c572057f7ba5 as it has an incoming, in-progress migration 26f071cf-8009-45aa-9b70-228b5d75eb73. Migration status is confirming {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1145.724753] env[69994]: INFO nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating resource usage from migration 3ee157d3-094d-4414-983a-239b7b1ea87f [ 1145.725127] env[69994]: INFO nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating resource usage from migration 26f071cf-8009-45aa-9b70-228b5d75eb73 [ 1145.745644] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eff21ec5-a51d-4004-9edf-1891f706fe9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.745848] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.745911] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance e6acdc45-5e8f-4ff0-9259-3de73a6fdd14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.746069] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 19fedc80-8def-426a-af73-ad871e127e02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.746212] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance be421d40-9859-4e0d-aef8-a2feb8717a78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.746349] env[69994]: WARNING nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 65facb63-1323-4905-b107-a5c5782d4a4c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
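
The resource-tracker pass that continues below reports a final resource view of used_ram=2752MB, used_disk=11GB and used_vcpus=11. A quick consistency check reproduces those totals from the eleven allocations enumerated above (nine at 192 MB plus the two resized instances d8d2958c-e44c-4796-becc-c572057f7ba5 and ca237467-eafc-4c18-a56e-98b94d111c92 at 256 MB), assuming that used_ram also counts the 512 MB reserved in the MEMORY_MB inventory reported later in this trace and that the allocation for 65facb63-1323-4905-b107-a5c5782d4a4c (the WARNING above) is excluded. This is only an illustrative back-of-the-envelope sketch, not Nova's accounting code:

# Hedged sanity check of the "Final resource view" figures reported below.
# Per-allocation numbers come from the resource-tracker DEBUG lines above;
# the 512 MB reservation comes from the MEMORY_MB inventory later in the log.
# Treating used_ram as "allocated + reserved" is an assumption that happens
# to match the reported total.
allocations_mb = [192] * 9 + [256] * 2   # nine regular allocations + two resized ones
reserved_mb = 512

used_ram_mb = sum(allocations_mb) + reserved_mb
used_disk_gb = len(allocations_mb) * 1   # each allocation claims DISK_GB: 1
used_vcpus = len(allocations_mb) * 1     # each allocation claims VCPU: 1

print(used_ram_mb, used_disk_gb, used_vcpus)   # -> 2752 11 11
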
[ 1145.746470] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Migration 26f071cf-8009-45aa-9b70-228b5d75eb73 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1145.746586] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance d8d2958c-e44c-4796-becc-c572057f7ba5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.746700] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 03e58b14-12fe-46e5-b483-4176d5a43c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.746813] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Migration 3ee157d3-094d-4414-983a-239b7b1ea87f is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1145.746929] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ca237467-eafc-4c18-a56e-98b94d111c92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.747053] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance d31f167f-8248-4aef-aa3c-6bc7259e1a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.747243] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1145.747380] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2752MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1145.795973] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242631, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.881094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e8654b-6003-48a1-96f9-4665bee38090 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.888017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ac23c4-9316-4dc6-af54-f6faddc37062 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.919181] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9311000e-b94c-4259-a16b-d0ca0e4ff242 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.928524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4aeee3-15ce-44c8-ab62-1a027c0cda28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.941266] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.006903] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.026275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-d8d2958c-e44c-4796-becc-c572057f7ba5" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.026275] env[69994]: DEBUG nova.objects.instance [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lazy-loading 'migration_context' on Instance uuid d8d2958c-e44c-4796-becc-c572057f7ba5 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.296539] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242631, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.444729] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.528073] env[69994]: DEBUG nova.objects.base [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1146.529288] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0123cda-2222-4ff6-8cbc-0a408859e6e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.533378] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa14190-dbc6-4d91-8626-62c57c4edb47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.568956] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb08d80b-f8fa-4182-b143-c52dac7070cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.571903] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18532683-16eb-418d-abc9-4afd1c6be8c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.577509] env[69994]: DEBUG oslo_vmware.api [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1146.577509] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52698768-c08a-479e-0787-896e5b7ff05d" [ 1146.577509] env[69994]: _type = "Task" [ 1146.577509] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.580081] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance 'ca237467-eafc-4c18-a56e-98b94d111c92' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1146.591701] env[69994]: DEBUG oslo_vmware.api [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52698768-c08a-479e-0787-896e5b7ff05d, 'name': SearchDatastore_Task, 'duration_secs': 0.006695} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.592016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.797981] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242631, 'name': ReconfigVM_Task, 'duration_secs': 1.358589} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.798314] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Reconfigured VM instance instance-00000068 to attach disk [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.798882] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-417ed8c7-5bec-4fc5-8b92-cf66f3f6094d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.805492] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1146.805492] env[69994]: value = "task-3242632" [ 1146.805492] env[69994]: _type = "Task" [ 1146.805492] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.812956] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242632, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.950076] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1146.950367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.238s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.950667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.084s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.950895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.953489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.361s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.954820] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.954966] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Cleaning up deleted instances {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1146.974467] env[69994]: INFO nova.scheduler.client.report [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted allocations for instance 65facb63-1323-4905-b107-a5c5782d4a4c [ 1147.089281] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.089559] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8697400-c43a-4a1c-92cc-0db99959f7a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.096973] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 
tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1147.096973] env[69994]: value = "task-3242633" [ 1147.096973] env[69994]: _type = "Task" [ 1147.096973] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.104598] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.315461] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242632, 'name': Rename_Task, 'duration_secs': 0.133105} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.315834] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.315975] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee874cf8-f4cc-452a-81f2-26a426457597 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.321460] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1147.321460] env[69994]: value = "task-3242634" [ 1147.321460] env[69994]: _type = "Task" [ 1147.321460] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.328674] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242634, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.471329] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] There are 51 instances to clean {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1147.471511] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 05993c51-605c-4154-afc1-f3bc5344258c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.481602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fb62e3d-62ea-459e-b0ba-5dc1583ed48b tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "65facb63-1323-4905-b107-a5c5782d4a4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.989s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.609448] env[69994]: DEBUG oslo_vmware.api [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242633, 'name': PowerOnVM_Task, 'duration_secs': 0.352419} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.609767] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1147.609957] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f352f406-7f8f-411c-9d2f-62b85ea784b7 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance 'ca237467-eafc-4c18-a56e-98b94d111c92' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1147.616695] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af42cf19-1427-44ed-b394-6103af77172b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.623980] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59daa685-9165-4b32-aa03-f35debdbc204 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.655269] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f748966-f150-4273-829e-0c8b744de145 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.664441] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c7b539-93b1-42ec-a1e8-db155a3c0702 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.678158] env[69994]: DEBUG nova.compute.provider_tree [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not 
changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.832018] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242634, 'name': PowerOnVM_Task} progress is 93%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.975061] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: dd5ebc73-5866-4a5b-9d4f-aac721b0da8d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.181418] env[69994]: DEBUG nova.scheduler.client.report [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.192429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "19fedc80-8def-426a-af73-ad871e127e02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.192739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "19fedc80-8def-426a-af73-ad871e127e02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.193140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "19fedc80-8def-426a-af73-ad871e127e02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.193358] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "19fedc80-8def-426a-af73-ad871e127e02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.193539] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock 
"19fedc80-8def-426a-af73-ad871e127e02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.196153] env[69994]: INFO nova.compute.manager [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Terminating instance [ 1148.332847] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242634, 'name': PowerOnVM_Task} progress is 93%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.479309] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 1a5b269f-5ee8-4bcc-812e-78388edb1e50] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.701864] env[69994]: DEBUG nova.compute.manager [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1148.702143] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1148.703086] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab6601d-ed51-4632-9f65-b5b8ded53be2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.714309] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.714509] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e505058-6539-4b14-a544-24fe8aa9ec44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.725320] env[69994]: DEBUG oslo_vmware.api [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1148.725320] env[69994]: value = "task-3242635" [ 1148.725320] env[69994]: _type = "Task" [ 1148.725320] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.735741] env[69994]: DEBUG oslo_vmware.api [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242635, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.749596] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "90e411dd-26f3-421d-b2d0-620c61fe8476" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.749915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.832651] env[69994]: DEBUG oslo_vmware.api [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242634, 'name': PowerOnVM_Task, 'duration_secs': 1.342533} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.832914] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.833149] env[69994]: INFO nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Took 9.26 seconds to spawn the instance on the hypervisor. 
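
Every vCenter operation in this trace (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task) follows the same shape: the driver submits the task, wait_for_task polls it while _poll_task logs the progress percentage, and completion is logged with a duration_secs value. The snippet below is only a simplified, self-contained sketch of that polling loop; FakeTask, its fields, and the poll interval are invented for illustration and are not the oslo.vmware API:

import time

# Minimal stand-in for a vCenter task object. The real driver reads progress
# and state from the Task managed object via the property collector; this
# fake simply advances on each poll.
class FakeTask:
    def __init__(self, task_id):
        self.id = task_id
        self.progress = 0
        self.state = "running"

    def refresh(self):
        self.progress = min(self.progress + 50, 100)
        if self.progress == 100:
            self.state = "success"

def wait_for_task(task, poll_interval=0.5):
    # Poll until the task finishes, mirroring the progress/duration pattern
    # visible in the oslo_vmware.api records above (a sketch, not the real code).
    started = time.monotonic()
    while True:
        task.refresh()
        print(f"task {task.id}: progress {task.progress}%")
        if task.state == "success":
            print(f"task {task.id}: completed after {time.monotonic() - started:.3f}s")
            return
        if task.state == "error":
            raise RuntimeError(f"task {task.id} failed")
        time.sleep(poll_interval)

wait_for_task(FakeTask("task-3242634"))
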
[ 1148.833332] env[69994]: DEBUG nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.834124] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73289eb-b171-49b5-a8cb-46d41056f683 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.983544] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 0cc8ff8b-c391-416e-a1f5-9a76d61dfd2c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.190825] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.237s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.239037] env[69994]: DEBUG oslo_vmware.api [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242635, 'name': PowerOffVM_Task, 'duration_secs': 0.233748} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.239606] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.239802] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.240138] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebed4c19-d8a5-4c6d-bdcb-576add5652c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.252970] env[69994]: DEBUG nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1149.310988] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.311761] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.311761] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleting the datastore file [datastore2] 19fedc80-8def-426a-af73-ad871e127e02 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.311761] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f71fade0-b4ed-4145-9433-fab5a1564c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.324704] env[69994]: DEBUG oslo_vmware.api [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1149.324704] env[69994]: value = "task-3242637" [ 1149.324704] env[69994]: _type = "Task" [ 1149.324704] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.337253] env[69994]: DEBUG oslo_vmware.api [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.354027] env[69994]: INFO nova.compute.manager [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Took 14.13 seconds to build instance. 
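
The _run_pending_deletes records interleaved through this section ("There are 51 instances to clean", "Instance has had 0 of 5 cleanup attempts") show the periodic task retrying post-deletion cleanup a bounded number of times per instance. The sketch below only illustrates that attempt-capped retry shape; the names MAX_ATTEMPTS, pending and cleanup are invented for the example and this is not Nova's implementation:

# Illustrative attempt-capped cleanup pass, loosely modelled on the
# "Instance has had N of 5 cleanup attempts" messages above.
MAX_ATTEMPTS = 5

pending = [
    {"uuid": "05993c51-605c-4154-afc1-f3bc5344258c", "attempts": 0},
    {"uuid": "dd5ebc73-5866-4a5b-9d4f-aac721b0da8d", "attempts": 0},
]

def cleanup(instance):
    # Placeholder for the real per-instance cleanup work.
    return True

print(f"There are {len(pending)} instances to clean")
for instance in pending:
    if instance["attempts"] >= MAX_ATTEMPTS:
        continue  # give up after the configured number of attempts
    print(f"{instance['uuid']}: {instance['attempts']} of {MAX_ATTEMPTS} cleanup attempts")
    if not cleanup(instance):
        instance["attempts"] += 1
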
[ 1149.486748] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: fc31da72-d09e-415e-9866-3e7fc91fec79] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.763629] env[69994]: INFO nova.scheduler.client.report [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted allocation for migration 26f071cf-8009-45aa-9b70-228b5d75eb73 [ 1149.780561] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.780878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.782593] env[69994]: INFO nova.compute.claims [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1149.822472] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "ca237467-eafc-4c18-a56e-98b94d111c92" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.822820] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.823112] env[69994]: DEBUG nova.compute.manager [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Going to confirm migration 7 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1149.836299] env[69994]: DEBUG oslo_vmware.api [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200974} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.836529] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.836712] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.836891] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.837075] env[69994]: INFO nova.compute.manager [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1149.837317] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.837732] env[69994]: DEBUG nova.compute.manager [-] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1149.837871] env[69994]: DEBUG nova.network.neutron [-] [instance: 19fedc80-8def-426a-af73-ad871e127e02] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1149.855214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10e9bff1-28b1-4ee8-8ab4-1a048f2a9896 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.641s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.887487] env[69994]: DEBUG nova.compute.manager [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-changed-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.887487] env[69994]: DEBUG nova.compute.manager [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing instance network info cache due to event network-changed-9e330706-3213-4a99-b48a-d2e09db34453. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1149.887740] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] Acquiring lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.887871] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] Acquired lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.888105] env[69994]: DEBUG nova.network.neutron [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.990524] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: f00662a9-92e0-4520-9ced-3cfd6e83628b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.190962] env[69994]: DEBUG nova.compute.manager [req-b182c3f9-8547-4d1d-87e1-7e8327358f64 req-a2c8af8f-c7a3-4df7-9bdc-8b89cde9144c service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Received event network-vif-deleted-41e39a21-c33b-4cc5-80b2-896e3ac13b2e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1150.191840] env[69994]: INFO nova.compute.manager [req-b182c3f9-8547-4d1d-87e1-7e8327358f64 req-a2c8af8f-c7a3-4df7-9bdc-8b89cde9144c service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Neutron deleted interface 41e39a21-c33b-4cc5-80b2-896e3ac13b2e; detaching it from the instance and deleting it from the info cache [ 1150.191840] env[69994]: DEBUG nova.network.neutron [req-b182c3f9-8547-4d1d-87e1-7e8327358f64 req-a2c8af8f-c7a3-4df7-9bdc-8b89cde9144c service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.272584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5f1da7ad-8cc2-4722-a885-e85c0fd69683 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.507s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.379661] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.383021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquired lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.383021] env[69994]: DEBUG nova.network.neutron [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1150.383021] env[69994]: DEBUG nova.objects.instance [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'info_cache' on Instance uuid ca237467-eafc-4c18-a56e-98b94d111c92 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.494956] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ab99499b-21a2-465b-9975-4e0adb18df94] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.586603] env[69994]: DEBUG nova.network.neutron [-] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.636601] env[69994]: DEBUG nova.network.neutron [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updated VIF entry in instance network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.637893] env[69994]: DEBUG nova.network.neutron [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e330706-32", "ovs_interfaceid": "9e330706-3213-4a99-b48a-d2e09db34453", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.693641] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0772e81b-6773-4ef9-9aad-74a7766de831 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.704895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57dc81b-f454-40e6-8d5a-3227644c8c7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.735750] env[69994]: DEBUG nova.compute.manager [req-b182c3f9-8547-4d1d-87e1-7e8327358f64 req-a2c8af8f-c7a3-4df7-9bdc-8b89cde9144c service nova] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Detach interface failed, port_id=41e39a21-c33b-4cc5-80b2-896e3ac13b2e, reason: Instance 19fedc80-8def-426a-af73-ad871e127e02 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1151.000353] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 799bf051-86b4-45bd-b9bf-df767074dac8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.016071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "d8d2958c-e44c-4796-becc-c572057f7ba5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.016352] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.016622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.016776] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.017015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.019092] env[69994]: INFO nova.compute.manager [None req-700d4b96-563f-4a87-8181-a346cd281dd8 
tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Terminating instance [ 1151.090474] env[69994]: INFO nova.compute.manager [-] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Took 1.25 seconds to deallocate network for instance. [ 1151.139479] env[69994]: DEBUG oslo_concurrency.lockutils [req-5e5c314b-f4d1-4ccb-8f3b-4e27fc82e59a req-e089ad2e-b72d-4a8b-943f-8d7f8778baae service nova] Releasing lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.167511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ae4470-5d53-4808-9f23-5c2caacb5e6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.176118] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcb56ed-0347-4e5c-ac62-87750408a3be {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.208014] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b273e3c7-8724-435a-9aca-141e6a0bfb66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.215619] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edaa07a-6d87-42e6-a2c4-fd3cbe9fe52d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.229495] env[69994]: DEBUG nova.compute.provider_tree [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.452073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.452468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.506201] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 384889a3-c3d9-4e0e-8d1c-95193cf4343d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.523611] env[69994]: DEBUG nova.compute.manager [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1151.523938] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1151.525199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbac4e6-239a-4dd3-ae98-c12401299057 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.540236] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1151.540536] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac24f3a5-7682-439f-8002-bbdea238d008 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.548179] env[69994]: DEBUG oslo_vmware.api [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1151.548179] env[69994]: value = "task-3242638" [ 1151.548179] env[69994]: _type = "Task" [ 1151.548179] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.557805] env[69994]: DEBUG oslo_vmware.api [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242638, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.599398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.602516] env[69994]: DEBUG nova.network.neutron [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance_info_cache with network_info: [{"id": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "address": "fa:16:3e:81:39:4b", "network": {"id": "14e5b992-c393-4acc-ab6d-ad5983fe2729", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1321568807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee188ea80c9847188df8b8482b7c6ec7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31264e2-3e0a-4dfb-ba1f-6389d7d47548", "external-id": "nsx-vlan-transportzone-233", "segmentation_id": 233, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc53c19bc-a6", "ovs_interfaceid": "c53c19bc-a6d9-4b00-907a-97b4755bb119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.733179] env[69994]: DEBUG nova.scheduler.client.report [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.955432] env[69994]: DEBUG nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1152.009584] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 071151e4-a3ee-4a89-8b83-19bef3fb7d3e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.058613] env[69994]: DEBUG oslo_vmware.api [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242638, 'name': PowerOffVM_Task, 'duration_secs': 0.321391} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.058891] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1152.059090] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1152.059364] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43f8d684-6893-422d-ba86-89f54e7abae9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.105438] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Releasing lock "refresh_cache-ca237467-eafc-4c18-a56e-98b94d111c92" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.105727] env[69994]: DEBUG nova.objects.instance [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lazy-loading 'migration_context' on Instance uuid ca237467-eafc-4c18-a56e-98b94d111c92 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.142692] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1152.142883] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1152.143059] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleting the datastore file [datastore2] 
d8d2958c-e44c-4796-becc-c572057f7ba5 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.143418] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a651e574-6271-4d15-971c-0a4ac2817d97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.152323] env[69994]: DEBUG oslo_vmware.api [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1152.152323] env[69994]: value = "task-3242640" [ 1152.152323] env[69994]: _type = "Task" [ 1152.152323] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.160779] env[69994]: DEBUG oslo_vmware.api [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242640, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.239052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.239266] env[69994]: DEBUG nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1152.241864] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.643s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.242086] env[69994]: DEBUG nova.objects.instance [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lazy-loading 'resources' on Instance uuid 19fedc80-8def-426a-af73-ad871e127e02 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.477009] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.513144] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 81bae584-e558-4f96-9696-2510fed5a2e0] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.608901] env[69994]: DEBUG nova.objects.base [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1152.609852] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95eb699b-6939-4eaf-a67f-3865224f6f49 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.629856] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c09619f-e7bc-40c0-bd4d-519faa7596f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.637627] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1152.637627] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d82349-bb4b-c4ff-a60d-a067bbaefbf7" [ 1152.637627] env[69994]: _type = "Task" [ 1152.637627] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.646071] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d82349-bb4b-c4ff-a60d-a067bbaefbf7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.661543] env[69994]: DEBUG oslo_vmware.api [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211578} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.661794] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.662108] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.662315] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.662685] env[69994]: INFO nova.compute.manager [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1152.662757] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1152.663047] env[69994]: DEBUG nova.compute.manager [-] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1152.663182] env[69994]: DEBUG nova.network.neutron [-] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1152.745391] env[69994]: DEBUG nova.compute.utils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1152.750872] env[69994]: DEBUG nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1152.750872] env[69994]: DEBUG nova.network.neutron [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1152.796281] env[69994]: DEBUG nova.policy [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '761ebe718b0f48939612e82c6b1e6766', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4c158f7555d4606b641be4264d95eaa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1152.897417] env[69994]: DEBUG nova.compute.manager [req-d8e3e6f5-1c85-45ad-96ad-5006f6e7bbe5 req-ba831664-77dc-4251-8226-458ad8ac1db1 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Received event network-vif-deleted-63b08705-8a3c-4011-9ebc-15f8463de275 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1152.897417] env[69994]: INFO nova.compute.manager [req-d8e3e6f5-1c85-45ad-96ad-5006f6e7bbe5 req-ba831664-77dc-4251-8226-458ad8ac1db1 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Neutron deleted interface 63b08705-8a3c-4011-9ebc-15f8463de275; detaching it from the instance and deleting it from the info cache [ 1152.897417] env[69994]: DEBUG nova.network.neutron [req-d8e3e6f5-1c85-45ad-96ad-5006f6e7bbe5 req-ba831664-77dc-4251-8226-458ad8ac1db1 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.922064] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92006d8-7496-4d9f-afca-eb8d344b7639 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.933748] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3b6e71-7e76-4e01-b65a-8a0efb5b6664 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.983155] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baadaf97-cce1-44b0-83bb-5b2aba11e406 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.993628] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db61669-7a67-4b6d-b2b5-e3f65a1b65d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.015069] env[69994]: DEBUG nova.compute.provider_tree [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 
92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.017393] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 47e80abc-2f7e-432c-bd2f-3064841401fc] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.060621] env[69994]: DEBUG nova.network.neutron [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Successfully created port: 7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1153.150614] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d82349-bb4b-c4ff-a60d-a067bbaefbf7, 'name': SearchDatastore_Task, 'duration_secs': 0.015131} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.150989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.250825] env[69994]: DEBUG nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1153.380487] env[69994]: DEBUG nova.network.neutron [-] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.399862] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-569085f1-e256-4345-b35c-b0b72160c733 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.410515] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cb13c1-46a0-45de-94f4-a85073003b36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.440943] env[69994]: DEBUG nova.compute.manager [req-d8e3e6f5-1c85-45ad-96ad-5006f6e7bbe5 req-ba831664-77dc-4251-8226-458ad8ac1db1 service nova] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Detach interface failed, port_id=63b08705-8a3c-4011-9ebc-15f8463de275, reason: Instance d8d2958c-e44c-4796-becc-c572057f7ba5 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1153.521836] env[69994]: DEBUG nova.scheduler.client.report [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.525183] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 95b7d534-ac5b-4982-830d-bf65ecd610b3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.883120] env[69994]: INFO nova.compute.manager [-] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Took 1.22 seconds to deallocate network for instance. [ 1154.028240] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.786s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.030461] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: eed22b8d-f8ea-4b90-8730-61d9a89ddfaa] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1154.032399] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.556s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.034301] env[69994]: INFO nova.compute.claims [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.054354] env[69994]: INFO nova.scheduler.client.report [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted allocations for instance 19fedc80-8def-426a-af73-ad871e127e02 [ 1154.260807] env[69994]: DEBUG nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1154.288190] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1154.288502] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1154.288714] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1154.288945] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1154.289164] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1154.290134] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1154.290134] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1154.290134] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1154.290243] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 
tempest-ImagesTestJSON-1478781762-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1154.290475] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1154.290635] env[69994]: DEBUG nova.virt.hardware [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1154.291653] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6beef440-6a65-438f-8884-a1b798108efc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.301409] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715d60c2-8e4c-40f6-abe7-36373bca4a0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.391129] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.426367] env[69994]: DEBUG nova.compute.manager [req-66aa1cee-a02a-4d4e-9857-e852b2dc47a7 req-5f69b7a4-e90f-4d1a-91bf-619d2827d766 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Received event network-vif-plugged-7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1154.426640] env[69994]: DEBUG oslo_concurrency.lockutils [req-66aa1cee-a02a-4d4e-9857-e852b2dc47a7 req-5f69b7a4-e90f-4d1a-91bf-619d2827d766 service nova] Acquiring lock "90e411dd-26f3-421d-b2d0-620c61fe8476-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.426791] env[69994]: DEBUG oslo_concurrency.lockutils [req-66aa1cee-a02a-4d4e-9857-e852b2dc47a7 req-5f69b7a4-e90f-4d1a-91bf-619d2827d766 service nova] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.426958] env[69994]: DEBUG oslo_concurrency.lockutils [req-66aa1cee-a02a-4d4e-9857-e852b2dc47a7 req-5f69b7a4-e90f-4d1a-91bf-619d2827d766 service nova] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.427139] env[69994]: DEBUG nova.compute.manager [req-66aa1cee-a02a-4d4e-9857-e852b2dc47a7 req-5f69b7a4-e90f-4d1a-91bf-619d2827d766 service nova] [instance: 
90e411dd-26f3-421d-b2d0-620c61fe8476] No waiting events found dispatching network-vif-plugged-7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1154.427298] env[69994]: WARNING nova.compute.manager [req-66aa1cee-a02a-4d4e-9857-e852b2dc47a7 req-5f69b7a4-e90f-4d1a-91bf-619d2827d766 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Received unexpected event network-vif-plugged-7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 for instance with vm_state building and task_state spawning. [ 1154.514437] env[69994]: DEBUG nova.network.neutron [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Successfully updated port: 7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1154.537640] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 8f5a5852-cd78-434f-b413-3cc2314575bb] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1154.562612] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ecc612ef-6b65-4634-815c-d1bd3fc36893 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "19fedc80-8def-426a-af73-ad871e127e02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.370s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.017615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "refresh_cache-90e411dd-26f3-421d-b2d0-620c61fe8476" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.017615] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "refresh_cache-90e411dd-26f3-421d-b2d0-620c61fe8476" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.017940] env[69994]: DEBUG nova.network.neutron [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.043179] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 290e8749-6860-4303-b966-65d2efee5499] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.187181] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1c7873-87d8-4f85-8475-293c7e251557 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.194717] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d44599-8a8b-472b-94de-2273ca44a5f9 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.225948] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80497cfa-cf7a-4d1e-8114-e183fd23cf54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.232898] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb1a714-27d9-4a5f-8e5c-30557d11e2d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.245749] env[69994]: DEBUG nova.compute.provider_tree [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.546392] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 095e75b1-7806-4d1d-ab9e-49735f7aa0f3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.548774] env[69994]: DEBUG nova.network.neutron [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1155.670937] env[69994]: DEBUG nova.network.neutron [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Updating instance_info_cache with network_info: [{"id": "7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4", "address": "fa:16:3e:12:03:e9", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df1cfb0-74", "ovs_interfaceid": "7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.749860] env[69994]: DEBUG nova.scheduler.client.report [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.052304] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e17fcc84-7c86-41b6-88ec-8a35619534b6] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.174342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "refresh_cache-90e411dd-26f3-421d-b2d0-620c61fe8476" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.174680] env[69994]: DEBUG nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Instance network_info: |[{"id": "7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4", "address": "fa:16:3e:12:03:e9", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df1cfb0-74", "ovs_interfaceid": "7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1156.175130] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:03:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1156.182742] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1156.183011] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1156.183277] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aabc3aee-351f-41a1-96c7-5bdee687dc30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.203547] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1156.203547] env[69994]: value = "task-3242641" [ 1156.203547] env[69994]: _type = "Task" [ 1156.203547] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.211461] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242641, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.254778] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.222s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.255358] env[69994]: DEBUG nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1156.258066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.107s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.453685] env[69994]: DEBUG nova.compute.manager [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Received event network-changed-7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1156.453881] env[69994]: DEBUG nova.compute.manager [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Refreshing instance network info cache due to event network-changed-7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1156.454149] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] Acquiring lock "refresh_cache-90e411dd-26f3-421d-b2d0-620c61fe8476" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.454256] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] Acquired lock "refresh_cache-90e411dd-26f3-421d-b2d0-620c61fe8476" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.454413] env[69994]: DEBUG nova.network.neutron [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Refreshing network info cache for port 7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1156.555223] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 850930f9-d5fb-4546-9796-30e164a1cdd3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.715115] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242641, 'name': CreateVM_Task, 'duration_secs': 0.34174} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.715622] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1156.716567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.716745] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.717122] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1156.717390] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8af3c313-9db7-439c-b77a-ff3f2f4675a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.722647] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1156.722647] 
env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f15738-9063-1c42-bf15-3cfbcaa983d6" [ 1156.722647] env[69994]: _type = "Task" [ 1156.722647] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.730779] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f15738-9063-1c42-bf15-3cfbcaa983d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.761508] env[69994]: DEBUG nova.compute.utils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1156.766447] env[69994]: DEBUG nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1156.766622] env[69994]: DEBUG nova.network.neutron [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1156.802334] env[69994]: DEBUG nova.policy [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b512f0a1ffba457b977e472009f59eed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '377f65074c2442588aee091b5165e1cf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1156.913058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8ea5ed-9100-4527-bb7d-595d8f0e9023 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.920506] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c59fad-9f9f-4cbd-a1a5-6a83424b5ce7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.951819] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d859e151-194e-456e-ba06-02815cc19411 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.961769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e13ed7-d565-4090-bf9e-18a0b150fc42 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.976839] env[69994]: DEBUG nova.compute.provider_tree [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.051369] env[69994]: DEBUG nova.network.neutron [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Successfully created port: 160a9f22-7dd7-42df-84d2-d08a12cf2d6b {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1157.060263] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 0d42c1c7-2ac1-44f3-8311-929f141e0a65] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.161437] env[69994]: DEBUG nova.network.neutron [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Updated VIF entry in instance network info cache for port 7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.161795] env[69994]: DEBUG nova.network.neutron [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Updating instance_info_cache with network_info: [{"id": "7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4", "address": "fa:16:3e:12:03:e9", "network": {"id": "7880c31a-48c5-419f-856f-539c513f7147", "bridge": "br-int", "label": "tempest-ImagesTestJSON-152259375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4c158f7555d4606b641be4264d95eaa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7df1cfb0-74", "ovs_interfaceid": "7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.233363] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f15738-9063-1c42-bf15-3cfbcaa983d6, 'name': SearchDatastore_Task, 'duration_secs': 0.013682} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.233743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.233912] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1157.234181] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.234395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.234741] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.234880] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-874bc92e-686b-4992-9a92-3fc00aa0c452 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.245537] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1157.245723] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1157.246437] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e80e8a20-9853-4ae0-87d4-e968589b3ccf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.252070] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1157.252070] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526748cd-6e9c-8ae9-ac30-fdc32b9fc755" [ 1157.252070] env[69994]: _type = "Task" [ 1157.252070] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.260484] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526748cd-6e9c-8ae9-ac30-fdc32b9fc755, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.267107] env[69994]: DEBUG nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1157.479485] env[69994]: DEBUG nova.scheduler.client.report [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.563194] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ee7e0c02-ef19-4475-a936-f591c8185797] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.666508] env[69994]: DEBUG oslo_concurrency.lockutils [req-e8adaf7f-6cfe-4424-861e-748eb8afbde2 req-37d972ad-a044-4be9-86cb-1679425723e6 service nova] Releasing lock "refresh_cache-90e411dd-26f3-421d-b2d0-620c61fe8476" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.764633] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526748cd-6e9c-8ae9-ac30-fdc32b9fc755, 'name': SearchDatastore_Task, 'duration_secs': 0.009221} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.765446] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c1d8f43-52ce-4b03-978f-b9b8c1bb6031 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.770832] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1157.770832] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ad4af4-2795-99dc-cb46-5e2ae6785ade" [ 1157.770832] env[69994]: _type = "Task" [ 1157.770832] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.781540] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ad4af4-2795-99dc-cb46-5e2ae6785ade, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.066262] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 29071eb9-6334-4c23-acb4-142c12aa448d] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.278701] env[69994]: DEBUG nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1158.284632] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ad4af4-2795-99dc-cb46-5e2ae6785ade, 'name': SearchDatastore_Task, 'duration_secs': 0.010096} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.285405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.285405] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 90e411dd-26f3-421d-b2d0-620c61fe8476/90e411dd-26f3-421d-b2d0-620c61fe8476.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1158.285405] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bb3182c-43c2-48df-ac63-448c79105c00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.293603] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1158.293603] env[69994]: value = "task-3242642" [ 1158.293603] env[69994]: _type = "Task" [ 1158.293603] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.302551] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242642, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.310611] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1158.310851] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1158.311016] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1158.311209] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1158.311354] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1158.311501] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1158.311708] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1158.311865] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1158.312392] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1158.312392] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1158.312392] env[69994]: DEBUG nova.virt.hardware [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1158.313246] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb1b19b-2201-4f44-b200-fc259d4517b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.321249] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcea6dc-8142-493a-b129-b498d4d8bc22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.480030] env[69994]: DEBUG nova.compute.manager [req-ec64c356-5d3e-4767-898d-5183481f661e req-c2c16a7c-73ea-47dc-ab06-3d8feaadc5e6 service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Received event network-vif-plugged-160a9f22-7dd7-42df-84d2-d08a12cf2d6b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1158.480030] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec64c356-5d3e-4767-898d-5183481f661e req-c2c16a7c-73ea-47dc-ab06-3d8feaadc5e6 service nova] Acquiring lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.480211] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec64c356-5d3e-4767-898d-5183481f661e req-c2c16a7c-73ea-47dc-ab06-3d8feaadc5e6 service nova] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.480383] env[69994]: DEBUG oslo_concurrency.lockutils [req-ec64c356-5d3e-4767-898d-5183481f661e req-c2c16a7c-73ea-47dc-ab06-3d8feaadc5e6 service nova] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.480552] env[69994]: DEBUG nova.compute.manager [req-ec64c356-5d3e-4767-898d-5183481f661e req-c2c16a7c-73ea-47dc-ab06-3d8feaadc5e6 service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] No waiting events found dispatching network-vif-plugged-160a9f22-7dd7-42df-84d2-d08a12cf2d6b {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1158.480718] 
env[69994]: WARNING nova.compute.manager [req-ec64c356-5d3e-4767-898d-5183481f661e req-c2c16a7c-73ea-47dc-ab06-3d8feaadc5e6 service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Received unexpected event network-vif-plugged-160a9f22-7dd7-42df-84d2-d08a12cf2d6b for instance with vm_state building and task_state spawning. [ 1158.489107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.231s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.492103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.101s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.492258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.522178] env[69994]: INFO nova.scheduler.client.report [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted allocations for instance d8d2958c-e44c-4796-becc-c572057f7ba5 [ 1158.569497] env[69994]: DEBUG nova.network.neutron [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Successfully updated port: 160a9f22-7dd7-42df-84d2-d08a12cf2d6b {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1158.570869] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 4b3addd0-22b0-4793-af75-dba381c4a83f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.803717] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461471} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.803979] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 90e411dd-26f3-421d-b2d0-620c61fe8476/90e411dd-26f3-421d-b2d0-620c61fe8476.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1158.804210] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1158.804466] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1f23a96-213f-4905-ad0e-94d73e1fcca6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.810773] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1158.810773] env[69994]: value = "task-3242643" [ 1158.810773] env[69994]: _type = "Task" [ 1158.810773] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.818392] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242643, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.030609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-700d4b96-563f-4a87-8181-a346cd281dd8 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "d8d2958c-e44c-4796-becc-c572057f7ba5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.014s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.042883] env[69994]: INFO nova.scheduler.client.report [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted allocation for migration 3ee157d3-094d-4414-983a-239b7b1ea87f [ 1159.073331] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "refresh_cache-4eb3724f-35ac-4f8d-a742-561b9c0333d8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.073480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "refresh_cache-4eb3724f-35ac-4f8d-a742-561b9c0333d8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.073651] env[69994]: DEBUG nova.network.neutron [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1159.074795] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 17389887-5463-44e1-b1c0-f123d8dedec7] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.324864] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242643, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063925} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.325151] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1159.325916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f5ab2b-6097-4ed0-b259-f9ecbff4ffc8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.348419] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 90e411dd-26f3-421d-b2d0-620c61fe8476/90e411dd-26f3-421d-b2d0-620c61fe8476.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.348877] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-563ae6dc-d27e-4687-8b09-86853195c8ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.371028] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1159.371028] env[69994]: value = "task-3242644" [ 1159.371028] env[69994]: _type = "Task" [ 1159.371028] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.380383] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242644, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.547569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.725s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.579538] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 80705dfe-4768-4f35-8acf-316b15814f78] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.609859] env[69994]: DEBUG nova.network.neutron [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1159.736767] env[69994]: DEBUG nova.network.neutron [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Updating instance_info_cache with network_info: [{"id": "160a9f22-7dd7-42df-84d2-d08a12cf2d6b", "address": "fa:16:3e:14:73:69", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap160a9f22-7d", "ovs_interfaceid": "160a9f22-7dd7-42df-84d2-d08a12cf2d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.815305] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.815563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.815772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.815959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.816151] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.818171] env[69994]: INFO nova.compute.manager [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Terminating instance [ 1159.883323] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242644, 'name': ReconfigVM_Task, 'duration_secs': 0.294619} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.883323] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 90e411dd-26f3-421d-b2d0-620c61fe8476/90e411dd-26f3-421d-b2d0-620c61fe8476.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.883323] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e73bacc-cf26-4c29-a8f5-9d101341429a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.890440] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1159.890440] env[69994]: value = "task-3242645" [ 1159.890440] env[69994]: _type = "Task" [ 1159.890440] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.901895] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242645, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.082883] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: c14851d2-66c5-4865-ae66-abbe303f0c31] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.239673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "refresh_cache-4eb3724f-35ac-4f8d-a742-561b9c0333d8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.239833] env[69994]: DEBUG nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Instance network_info: |[{"id": "160a9f22-7dd7-42df-84d2-d08a12cf2d6b", "address": "fa:16:3e:14:73:69", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap160a9f22-7d", "ovs_interfaceid": "160a9f22-7dd7-42df-84d2-d08a12cf2d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1160.240265] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:73:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '160a9f22-7dd7-42df-84d2-d08a12cf2d6b', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1160.247991] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.248270] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1160.248909] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f4c43ec-f238-4b36-922c-f638d1b88564 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.269842] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1160.269842] env[69994]: value = "task-3242646" [ 1160.269842] env[69994]: _type = "Task" [ 1160.269842] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.280684] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242646, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.321658] env[69994]: DEBUG nova.compute.manager [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1160.321898] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1160.322900] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcc04a0-1ddd-4068-ae6c-f8a95164fc5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.330925] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1160.331208] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c0ac961-b97e-4e85-bba4-78ffc94b370f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.342366] env[69994]: DEBUG oslo_vmware.api [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1160.342366] env[69994]: value = "task-3242647" [ 1160.342366] env[69994]: _type = "Task" [ 1160.342366] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.355303] env[69994]: DEBUG oslo_vmware.api [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242647, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.401063] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242645, 'name': Rename_Task, 'duration_secs': 0.139228} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.401363] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1160.401625] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60983823-e6cf-483f-8c35-f18a34521986 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.408573] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1160.408573] env[69994]: value = "task-3242648" [ 1160.408573] env[69994]: _type = "Task" [ 1160.408573] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.417193] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242648, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.521312] env[69994]: DEBUG nova.compute.manager [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Received event network-changed-160a9f22-7dd7-42df-84d2-d08a12cf2d6b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1160.521686] env[69994]: DEBUG nova.compute.manager [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Refreshing instance network info cache due to event network-changed-160a9f22-7dd7-42df-84d2-d08a12cf2d6b. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1160.521832] env[69994]: DEBUG oslo_concurrency.lockutils [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] Acquiring lock "refresh_cache-4eb3724f-35ac-4f8d-a742-561b9c0333d8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.522179] env[69994]: DEBUG oslo_concurrency.lockutils [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] Acquired lock "refresh_cache-4eb3724f-35ac-4f8d-a742-561b9c0333d8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.522300] env[69994]: DEBUG nova.network.neutron [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Refreshing network info cache for port 160a9f22-7dd7-42df-84d2-d08a12cf2d6b {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1160.586214] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 4ca53416-caed-418c-bb40-cabb8e311803] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.780589] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242646, 'name': CreateVM_Task, 'duration_secs': 0.433547} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.780788] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1160.781583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.781685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.781979] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1160.782253] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c286e216-8b1b-4bb4-83b5-b5731273bfe8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.787455] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 
tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1160.787455] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d131cc-94e2-9c81-b6d2-bcb85307fcb0" [ 1160.787455] env[69994]: _type = "Task" [ 1160.787455] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.795435] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d131cc-94e2-9c81-b6d2-bcb85307fcb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.852053] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "ca237467-eafc-4c18-a56e-98b94d111c92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.852298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.852524] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.852731] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.852926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.854550] env[69994]: DEBUG oslo_vmware.api [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242647, 'name': PowerOffVM_Task, 'duration_secs': 0.262805} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.854807] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1160.854996] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1160.855858] env[69994]: INFO nova.compute.manager [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Terminating instance [ 1160.858666] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a086e58-172e-4721-94e5-83e09993dd60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.920096] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242648, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.926835] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1160.927071] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1160.927242] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleting the datastore file [datastore2] e6acdc45-5e8f-4ff0-9259-3de73a6fdd14 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1160.927498] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aae3129-5400-4758-ae25-0befc3cb7962 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.934885] env[69994]: DEBUG oslo_vmware.api [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1160.934885] env[69994]: value = "task-3242650" [ 1160.934885] env[69994]: _type = "Task" [ 1160.934885] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.945200] env[69994]: DEBUG oslo_vmware.api [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.091265] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: c3ad850d-05b2-4ad3-b922-f45eeb9e5ecc] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.210723] env[69994]: DEBUG nova.network.neutron [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Updated VIF entry in instance network info cache for port 160a9f22-7dd7-42df-84d2-d08a12cf2d6b. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1161.211104] env[69994]: DEBUG nova.network.neutron [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Updating instance_info_cache with network_info: [{"id": "160a9f22-7dd7-42df-84d2-d08a12cf2d6b", "address": "fa:16:3e:14:73:69", "network": {"id": "0e925a40-2706-4137-8dd4-eeed26ae606e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1885024452-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "377f65074c2442588aee091b5165e1cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap160a9f22-7d", "ovs_interfaceid": "160a9f22-7dd7-42df-84d2-d08a12cf2d6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.300221] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d131cc-94e2-9c81-b6d2-bcb85307fcb0, 'name': SearchDatastore_Task, 'duration_secs': 0.010202} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.301395] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.301620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.303026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.303128] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1161.303378] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.303521] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.303704] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1161.304414] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e93d870-57aa-4713-9039-2e85385aad38 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.314013] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1161.314238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1161.315125] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e98638e-1a68-43fe-aa2f-fa224b8589e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.321047] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1161.321047] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e5a99-d74f-4d2c-393a-461efdb7d448" [ 1161.321047] env[69994]: _type = "Task" [ 1161.321047] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.329445] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524e5a99-d74f-4d2c-393a-461efdb7d448, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.359161] env[69994]: DEBUG nova.compute.manager [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1161.359395] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.360346] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79dd30ea-7060-4680-b894-2531d062f3f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.369082] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.369321] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e1b9e8c-ea10-4f4f-9e36-3b1c89df8928 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.376501] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1161.376501] env[69994]: value = "task-3242651" [ 1161.376501] env[69994]: _type = "Task" [ 1161.376501] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.392322] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242651, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.420160] env[69994]: DEBUG oslo_vmware.api [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242648, 'name': PowerOnVM_Task, 'duration_secs': 0.615964} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.420530] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1161.420741] env[69994]: INFO nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Took 7.16 seconds to spawn the instance on the hypervisor. 
[ 1161.420923] env[69994]: DEBUG nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1161.421745] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e899a043-5e54-416b-a416-eac223c2198f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.447899] env[69994]: DEBUG oslo_vmware.api [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13715} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.448180] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1161.448367] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1161.448543] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1161.448716] env[69994]: INFO nova.compute.manager [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1161.448971] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1161.449191] env[69994]: DEBUG nova.compute.manager [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1161.449281] env[69994]: DEBUG nova.network.neutron [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1161.595540] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e3697388-4598-4dde-8c20-43fc7665083b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.715307] env[69994]: DEBUG oslo_concurrency.lockutils [req-39919cd0-d932-4b4a-9c0f-a9699e16a31f req-f909db72-a4e3-44e1-86e1-667482c3b2bb service nova] Releasing lock "refresh_cache-4eb3724f-35ac-4f8d-a742-561b9c0333d8" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.806778] env[69994]: DEBUG nova.compute.utils [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1161.835784] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524e5a99-d74f-4d2c-393a-461efdb7d448, 'name': SearchDatastore_Task, 'duration_secs': 0.008744} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.836580] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa4df98b-2177-4737-87a8-83b0e38fff1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.842088] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1161.842088] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52071eb3-b30a-fc8f-0bf5-0fee07d30721" [ 1161.842088] env[69994]: _type = "Task" [ 1161.842088] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.850984] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52071eb3-b30a-fc8f-0bf5-0fee07d30721, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.887823] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242651, 'name': PowerOffVM_Task, 'duration_secs': 0.180868} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.888095] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1161.888269] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1161.888516] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dacc6eb4-428f-43e8-9c40-af6208e22252 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.944893] env[69994]: INFO nova.compute.manager [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Took 12.19 seconds to build instance. [ 1161.958363] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1161.958577] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1161.958771] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleting the datastore file [datastore1] ca237467-eafc-4c18-a56e-98b94d111c92 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1161.959061] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14efab03-45a5-4e5e-8652-dba1a3fc2234 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.971730] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for the task: (returnval){ [ 1161.971730] env[69994]: value = "task-3242653" [ 1161.971730] env[69994]: _type = "Task" [ 1161.971730] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.981147] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242653, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.099245] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ff645ae7-940e-4842-8915-a96d36d08067] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.311935] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.352849] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52071eb3-b30a-fc8f-0bf5-0fee07d30721, 'name': SearchDatastore_Task, 'duration_secs': 0.009841} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.353122] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.353383] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4eb3724f-35ac-4f8d-a742-561b9c0333d8/4eb3724f-35ac-4f8d-a742-561b9c0333d8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1162.353632] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a64bed9d-d33f-414f-bc20-d3a8cd502928 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.360877] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1162.360877] env[69994]: value = "task-3242654" [ 1162.360877] env[69994]: _type = "Task" [ 1162.360877] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.368896] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242654, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.424134] env[69994]: DEBUG nova.network.neutron [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.446269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d67b140b-abc5-44b8-8fe6-aa667be8ea53 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.696s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.482647] env[69994]: DEBUG oslo_vmware.api [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Task: {'id': task-3242653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129189} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.482938] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1162.483148] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1162.483345] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1162.483534] env[69994]: INFO nova.compute.manager [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1162.483788] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.484022] env[69994]: DEBUG nova.compute.manager [-] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1162.484139] env[69994]: DEBUG nova.network.neutron [-] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1162.559794] env[69994]: DEBUG nova.compute.manager [req-79cca485-4e02-4b1d-aa23-dd5e7ac725db req-6a9ab3f4-f580-4f84-b473-3a7b58a5716c service nova] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Received event network-vif-deleted-1a71caa6-eaba-4605-b4de-9df7bfa68007 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1162.604188] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ef410b09-8686-409e-8391-d50cd0e0df04] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.872476] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49702} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.874257] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4eb3724f-35ac-4f8d-a742-561b9c0333d8/4eb3724f-35ac-4f8d-a742-561b9c0333d8.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1162.874257] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1162.876453] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c73deab4-3d32-4971-acce-74629ef338bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.885636] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1162.885636] env[69994]: value = "task-3242655" [ 1162.885636] env[69994]: _type = "Task" [ 1162.885636] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.896619] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242655, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.926801] env[69994]: INFO nova.compute.manager [-] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Took 1.48 seconds to deallocate network for instance. [ 1163.109010] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 7ef329a2-4d61-428a-8a43-f309a1e953d6] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.274241] env[69994]: DEBUG nova.network.neutron [-] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.380489] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.380787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.380996] env[69994]: INFO nova.compute.manager [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Attaching volume 11b8e021-7578-46be-b87c-5051c0a1cb4d to /dev/sdb [ 1163.397651] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077414} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.397918] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.398752] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628f9e48-f132-48f5-a5f6-3e06985a4f55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.421764] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 4eb3724f-35ac-4f8d-a742-561b9c0333d8/4eb3724f-35ac-4f8d-a742-561b9c0333d8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.423600] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db0d33a4-cd71-41a7-9b53-e7ea6ff81761 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.438097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a46bfb3-4716-4770-ad53-ac7b9ac1b6a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.442912] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.443234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.443468] env[69994]: DEBUG nova.objects.instance [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'resources' on Instance uuid e6acdc45-5e8f-4ff0-9259-3de73a6fdd14 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.450981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14536b2a-7798-49cf-a09e-876d0ed5a3f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.453555] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1163.453555] env[69994]: value = 
"task-3242656" [ 1163.453555] env[69994]: _type = "Task" [ 1163.453555] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.462968] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242656, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.466441] env[69994]: DEBUG nova.virt.block_device [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updating existing volume attachment record: b661c067-bbde-4f93-8880-6c0131eaabfa {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1163.613018] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 0b975ce0-40a4-48a9-a046-66227636d496] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.777454] env[69994]: INFO nova.compute.manager [-] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Took 1.29 seconds to deallocate network for instance. [ 1163.964809] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242656, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.077186] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb005f5-8a78-4dab-96e7-b038e53452c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.085355] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f40b36-d872-4b13-8f23-ca78352dfe32 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.115957] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e049334-dd90-4584-bbee-40257f3cceed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.119754] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 25a64898-568e-4095-aace-f8a564cdf916] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.126818] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09261051-749e-473b-9198-6ac84c468e0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.142793] env[69994]: DEBUG nova.compute.provider_tree [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1164.167664] env[69994]: DEBUG nova.compute.manager [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.168798] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d52e8b-8d88-4253-a322-c3ef4c261dd0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.285135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.466144] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242656, 'name': ReconfigVM_Task, 'duration_secs': 0.540146} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.466421] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 4eb3724f-35ac-4f8d-a742-561b9c0333d8/4eb3724f-35ac-4f8d-a742-561b9c0333d8.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1164.467040] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bb54cbc-c37a-43ad-b5eb-255feaaa18c6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.475034] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1164.475034] env[69994]: value = "task-3242660" [ 1164.475034] env[69994]: _type = "Task" [ 1164.475034] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.484556] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242660, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.588251] env[69994]: DEBUG nova.compute.manager [req-2a46fd02-4ab0-4d1a-98a5-e2eb739889df req-8337b747-9496-49e6-8c01-b6c3aa6ab585 service nova] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Received event network-vif-deleted-c53c19bc-a6d9-4b00-907a-97b4755bb119 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1164.623922] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: f3268fe1-768c-4d27-828a-5885ce166f90] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.646301] env[69994]: DEBUG nova.scheduler.client.report [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1164.680630] env[69994]: INFO nova.compute.manager [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] instance snapshotting [ 1164.683559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5a6d8c-9a77-4afd-bbe7-cfb50ae6a08e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.704170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f033f26b-206f-45b9-8a9a-1ae7cb727973 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.985557] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242660, 'name': Rename_Task, 'duration_secs': 0.178717} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.986327] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1164.986612] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a264672f-6bc3-4079-8f70-2793f64c434b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.993297] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1164.993297] env[69994]: value = "task-3242661" [ 1164.993297] env[69994]: _type = "Task" [ 1164.993297] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.003110] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.126919] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 14b28a21-1b71-4d7e-bd6c-269f5d588300] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.151463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.708s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.154273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.869s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.154545] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.170132] env[69994]: INFO nova.scheduler.client.report [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted allocations for instance e6acdc45-5e8f-4ff0-9259-3de73a6fdd14 [ 1165.173031] env[69994]: INFO nova.scheduler.client.report [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 
tempest-DeleteServersTestJSON-756950725-project-member] Deleted allocations for instance ca237467-eafc-4c18-a56e-98b94d111c92 [ 1165.215616] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1165.215931] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-08db4a30-275a-4a7a-8eb5-72759c0d2ed5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.225145] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1165.225145] env[69994]: value = "task-3242662" [ 1165.225145] env[69994]: _type = "Task" [ 1165.225145] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.236605] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242662, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.504997] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242661, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.630347] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: cd5a47f2-147b-4e50-980d-8e1c40bc7594] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.679589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9d34de2e-782d-40e4-bc46-48ebf96c8e5c tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "e6acdc45-5e8f-4ff0-9259-3de73a6fdd14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.864s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.684234] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3866be17-d024-4ad5-b2c4-77b043d95583 tempest-DeleteServersTestJSON-756950725 tempest-DeleteServersTestJSON-756950725-project-member] Lock "ca237467-eafc-4c18-a56e-98b94d111c92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.832s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.735437] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242662, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.007042] env[69994]: DEBUG oslo_vmware.api [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242661, 'name': PowerOnVM_Task, 'duration_secs': 0.598013} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.007042] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1166.007214] env[69994]: INFO nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Took 7.73 seconds to spawn the instance on the hypervisor. [ 1166.007372] env[69994]: DEBUG nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.008215] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0dd0094-4ad2-493f-9fc3-a0a6bf483914 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.134159] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 8b4444ee-15fe-457e-bdb8-fa0f1fc46fdf] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.237078] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242662, 'name': CreateSnapshot_Task, 'duration_secs': 0.652686} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.237471] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1166.239670] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8902e2-5eb9-463c-968f-d0a59099dd0f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.530252] env[69994]: INFO nova.compute.manager [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Took 14.07 seconds to build instance. 
[ 1166.637919] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: a4544bc9-6935-4825-9b45-2054d2ced330] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.761739] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1166.763216] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-36dc970e-0870-4866-95f0-27afcab801f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.775778] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1166.775778] env[69994]: value = "task-3242665" [ 1166.775778] env[69994]: _type = "Task" [ 1166.775778] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.784923] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242665, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.033137] env[69994]: DEBUG oslo_concurrency.lockutils [None req-60f9020c-c436-4bd0-b9d3-6450f42353ce tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.581s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.140699] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 566522b0-7aa7-4552-9be7-035d742ba394] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.288496] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242665, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.631617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.631985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.632262] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.632563] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.632747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.635676] env[69994]: INFO nova.compute.manager [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Terminating instance [ 1167.644179] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 6bfb2a10-b89e-4152-ae3b-c2b4e93a6a6e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.787322] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242665, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.939703] env[69994]: DEBUG nova.compute.manager [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1168.011786] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1168.011786] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1168.012433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae77b856-12b5-4d77-a4ff-a78cdc859f14 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.030582] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3f6e5a-c199-4636-af22-13ec7c24fd8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.056979] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] volume-11b8e021-7578-46be-b87c-5051c0a1cb4d/volume-11b8e021-7578-46be-b87c-5051c0a1cb4d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1168.057668] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7475b56-b152-435a-b45a-41d1eb77bd96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.077338] env[69994]: DEBUG oslo_vmware.api [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1168.077338] env[69994]: value = "task-3242666" [ 1168.077338] env[69994]: _type = "Task" [ 1168.077338] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.086180] env[69994]: DEBUG oslo_vmware.api [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242666, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.139880] env[69994]: DEBUG nova.compute.manager [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1168.140259] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.141447] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a405737c-c264-4afa-b6d4-376986d0df72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.147432] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: f07750f5-3f1d-4d97-98dc-285ed357cc7e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.151715] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.152223] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-496f5b21-e37a-413c-b23b-cfed9c259197 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.160982] env[69994]: DEBUG oslo_vmware.api [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1168.160982] env[69994]: value = "task-3242667" [ 1168.160982] env[69994]: _type = "Task" [ 1168.160982] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.169785] env[69994]: DEBUG oslo_vmware.api [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.288345] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242665, 'name': CloneVM_Task, 'duration_secs': 1.460815} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.288556] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Created linked-clone VM from snapshot [ 1168.289337] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1456c3b7-0d9e-4534-97f5-61658819c54c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.298233] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Uploading image 9168bf31-a756-49d2-a912-6d5f069593a4 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1168.311707] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1168.312018] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-16cb65b5-a6ff-4426-8cc1-94578c97408a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.320008] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1168.320008] env[69994]: value = "task-3242668" [ 1168.320008] env[69994]: _type = "Task" [ 1168.320008] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.328943] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242668, 'name': Destroy_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.459686] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.459988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.587537] env[69994]: DEBUG oslo_vmware.api [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242666, 'name': ReconfigVM_Task, 'duration_secs': 0.46351} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.587756] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfigured VM instance instance-00000064 to attach disk [datastore2] volume-11b8e021-7578-46be-b87c-5051c0a1cb4d/volume-11b8e021-7578-46be-b87c-5051c0a1cb4d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1168.592450] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4beab12-d259-4058-bd69-f817b9fa7df5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.608652] env[69994]: DEBUG oslo_vmware.api [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1168.608652] env[69994]: value = "task-3242669" [ 1168.608652] env[69994]: _type = "Task" [ 1168.608652] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.619509] env[69994]: DEBUG oslo_vmware.api [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.653405] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: b99b73e6-3348-4d5d-aa57-f01ace0bfc42] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.671783] env[69994]: DEBUG oslo_vmware.api [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242667, 'name': PowerOffVM_Task, 'duration_secs': 0.211104} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.672249] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1168.672468] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1168.672735] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4792f235-35ad-49f3-bc44-5123a4845d25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.748054] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1168.748379] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1168.748656] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleting the datastore file [datastore2] 4eb3724f-35ac-4f8d-a742-561b9c0333d8 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1168.748967] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d947ed2-0c1f-468e-b21c-cdf4e1f7666c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.756996] env[69994]: DEBUG oslo_vmware.api [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for the task: (returnval){ [ 1168.756996] env[69994]: value = "task-3242671" [ 1168.756996] env[69994]: _type = "Task" [ 1168.756996] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.766239] env[69994]: DEBUG oslo_vmware.api [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242671, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.830119] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242668, 'name': Destroy_Task, 'duration_secs': 0.417072} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.830389] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Destroyed the VM [ 1168.830625] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1168.830879] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0c86d8f8-a90e-45db-bd35-f37ff381ecf7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.838383] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1168.838383] env[69994]: value = "task-3242672" [ 1168.838383] env[69994]: _type = "Task" [ 1168.838383] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.846306] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242672, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.965591] env[69994]: INFO nova.compute.claims [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1169.119728] env[69994]: DEBUG oslo_vmware.api [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242669, 'name': ReconfigVM_Task, 'duration_secs': 0.167087} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.120251] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1169.157319] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ed662f67-be0e-4f19-bb8a-6af39b4d348c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.267107] env[69994]: DEBUG oslo_vmware.api [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Task: {'id': task-3242671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189108} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.267358] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1169.267541] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1169.267714] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1169.267882] env[69994]: INFO nova.compute.manager [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1169.268145] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.268341] env[69994]: DEBUG nova.compute.manager [-] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1169.268431] env[69994]: DEBUG nova.network.neutron [-] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1169.350897] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242672, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.471473] env[69994]: INFO nova.compute.resource_tracker [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating resource usage from migration 1f68e833-4694-4d0b-a2dd-8092109b9cfc [ 1169.533647] env[69994]: DEBUG nova.compute.manager [req-e9ec1f96-fa2b-4d8f-93cd-8b7e3ed0b06d req-45d96891-3e79-4a45-b871-e1b14bd924b9 service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Received event network-vif-deleted-160a9f22-7dd7-42df-84d2-d08a12cf2d6b {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1169.533854] env[69994]: INFO nova.compute.manager [req-e9ec1f96-fa2b-4d8f-93cd-8b7e3ed0b06d req-45d96891-3e79-4a45-b871-e1b14bd924b9 service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Neutron deleted interface 160a9f22-7dd7-42df-84d2-d08a12cf2d6b; detaching it from the instance and deleting it from the info cache [ 1169.534054] env[69994]: DEBUG nova.network.neutron [req-e9ec1f96-fa2b-4d8f-93cd-8b7e3ed0b06d req-45d96891-3e79-4a45-b871-e1b14bd924b9 service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.636793] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fc9d30-dd51-4187-ae63-a0a9addab01c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.645884] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c67e48-a796-40d0-9396-f3f0603929cd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.676359] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 4dbf53e0-caa1-41f4-8376-dfba8d8567cd] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.683017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcf93a8-ab71-44e2-86e6-b0b5bf376b31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.689738] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e4bbf9-adac-42f8-94df-1cd4e643b54f 
{{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.705755] env[69994]: DEBUG nova.compute.provider_tree [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.855411] env[69994]: DEBUG oslo_vmware.api [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242672, 'name': RemoveSnapshot_Task, 'duration_secs': 0.814243} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.855411] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1170.011807] env[69994]: DEBUG nova.network.neutron [-] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.040582] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61d2d030-6efa-4d92-8573-d24349cfede4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.050730] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5fbbac-c107-48d6-aa8a-bc51ab7eaa47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.085079] env[69994]: DEBUG nova.compute.manager [req-e9ec1f96-fa2b-4d8f-93cd-8b7e3ed0b06d req-45d96891-3e79-4a45-b871-e1b14bd924b9 service nova] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Detach interface failed, port_id=160a9f22-7dd7-42df-84d2-d08a12cf2d6b, reason: Instance 4eb3724f-35ac-4f8d-a742-561b9c0333d8 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1170.184902] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e8caf244-413b-49bb-bdff-79aca0ccbc2b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.187860] env[69994]: DEBUG nova.objects.instance [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'flavor' on Instance uuid be421d40-9859-4e0d-aef8-a2feb8717a78 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1170.209166] env[69994]: DEBUG nova.scheduler.client.report [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1170.359022] env[69994]: WARNING nova.compute.manager [None req-b05810d9-44bb-47c0-8975-fa3b0faf5b87 tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Image not found during snapshot: nova.exception.ImageNotFound: Image 9168bf31-a756-49d2-a912-6d5f069593a4 could not be found. [ 1170.515745] env[69994]: INFO nova.compute.manager [-] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Took 1.25 seconds to deallocate network for instance. 
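The entries for instance 90e411dd-26f3-421d-b2d0-620c61fe8476 above trace the VMware driver's snapshot-to-image sequence end to end: CreateSnapshot_Task, a linked-clone CloneVM_Task from that snapshot, a stream-optimized upload of the clone as image 9168bf31-a756-49d2-a912-6d5f069593a4, Destroy_Task on the temporary clone, and finally RemoveSnapshot_Task on the source VM. The sketch below is a heavily simplified approximation of that ordering only, with hypothetical helper callables standing in for the real vmops internals; it is not Nova's code.

```python
# Illustrative sketch mirroring the order of operations recorded above
# (CreateSnapshot_Task -> CloneVM_Task -> image upload -> Destroy_Task ->
# RemoveSnapshot_Task). Every helper callable is a hypothetical stand-in.
def snapshot_instance_to_image(vm, image_id, *,
                               create_snapshot, clone_linked, upload_image,
                               destroy_vm, remove_snapshot):
    """Capture a running VM as an image by way of a temporary linked clone."""
    snap = create_snapshot(vm)                  # CreateSnapshot_Task
    try:
        temp_vm = clone_linked(vm, snap)        # CloneVM_Task (linked clone)
        try:
            upload_image(temp_vm, image_id)     # stream-optimized upload
        finally:
            destroy_vm(temp_vm)                 # Destroy_Task on the clone
    finally:
        remove_snapshot(vm, snap)               # RemoveSnapshot_Task
    return image_id
```

The try/finally nesting reflects what the log implies: the temporary clone and the source snapshot are cleaned up even when the upload step fails, which is why a later "Image not found during snapshot" warning can still appear after the snapshot has been deleted.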
[ 1170.692450] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 87473dd1-458d-4ef4-a1bd-7e653e509ea4] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.695374] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ccfc2419-eba0-4100-b8df-196024a215b0 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.315s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.719175] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.259s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.719339] env[69994]: INFO nova.compute.manager [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Migrating [ 1170.804752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "90e411dd-26f3-421d-b2d0-620c61fe8476" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.805086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.805145] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "90e411dd-26f3-421d-b2d0-620c61fe8476-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.805311] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.805591] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.807763] env[69994]: INFO nova.compute.manager [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Terminating instance [ 1171.021619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.022134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.022402] env[69994]: DEBUG nova.objects.instance [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lazy-loading 'resources' on Instance uuid 4eb3724f-35ac-4f8d-a742-561b9c0333d8 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.164119] env[69994]: INFO nova.compute.manager [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Rebuilding instance [ 1171.196344] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 7f66a148-86fe-4ddc-b8ed-6e6a306bbc24] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.210182] env[69994]: DEBUG nova.compute.manager [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1171.211173] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8503dc00-0162-42df-a489-f37572d81136 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.234987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.235194] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.235372] env[69994]: DEBUG nova.network.neutron [None 
req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1171.311786] env[69994]: DEBUG nova.compute.manager [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1171.312032] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.313231] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8820f722-4dc7-4581-ab29-2b73eff7952d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.322741] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.322997] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8550760-828f-43b4-b4ec-ecb8a3a12c70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.330102] env[69994]: DEBUG oslo_vmware.api [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1171.330102] env[69994]: value = "task-3242673" [ 1171.330102] env[69994]: _type = "Task" [ 1171.330102] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.340467] env[69994]: DEBUG oslo_vmware.api [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242673, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.651038] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347991ed-d6cf-4d6b-9b1d-3daae0080bd3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.659784] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6143bad9-e529-4abf-a3e8-acb1260aa469 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.692688] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3fdd88-977c-4880-b546-d7d6c8ad0f9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.700091] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 45a8dced-6c49-441c-92e2-ee323ed8753c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.703078] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d69c161-9934-44f5-865f-667e1784fcca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.718210] env[69994]: DEBUG nova.compute.provider_tree [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.840265] env[69994]: DEBUG oslo_vmware.api [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242673, 'name': PowerOffVM_Task, 'duration_secs': 0.302058} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.840535] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.840698] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.840945] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e08921c-d997-4ac6-b33d-c83cd1fdc8f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.911901] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.912285] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.912550] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleting the datastore file [datastore2] 90e411dd-26f3-421d-b2d0-620c61fe8476 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.912817] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-822f288d-d045-4cc8-8c18-53df90fe0236 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.919576] env[69994]: DEBUG oslo_vmware.api [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for the task: (returnval){ [ 1171.919576] env[69994]: value = "task-3242675" [ 1171.919576] env[69994]: _type = "Task" [ 1171.919576] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.930493] env[69994]: DEBUG oslo_vmware.api [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242675, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.011698] env[69994]: DEBUG nova.network.neutron [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.209894] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: b003b7c2-e754-440e-8a65-13c5e9c68cd5] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1172.221545] env[69994]: DEBUG nova.scheduler.client.report [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.225419] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1172.225641] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4dea71f-4588-4465-8234-04badbeb300b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.233733] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 
tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1172.233733] env[69994]: value = "task-3242676" [ 1172.233733] env[69994]: _type = "Task" [ 1172.233733] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.244022] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.432146] env[69994]: DEBUG oslo_vmware.api [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Task: {'id': task-3242675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139917} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.432413] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.432610] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.432786] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.433137] env[69994]: INFO nova.compute.manager [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1172.433294] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1172.433489] env[69994]: DEBUG nova.compute.manager [-] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1172.433565] env[69994]: DEBUG nova.network.neutron [-] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1172.514063] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.713563] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: dbad6bed-64ba-4dfd-abad-c0b2c775ba2c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1172.716587] env[69994]: DEBUG nova.compute.manager [req-10947cc2-83b0-4d15-a8ca-f8c729e90689 req-ddb8ab54-f4ee-431e-9782-35087cf5dfc3 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Received event network-vif-deleted-7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.716898] env[69994]: INFO nova.compute.manager [req-10947cc2-83b0-4d15-a8ca-f8c729e90689 req-ddb8ab54-f4ee-431e-9782-35087cf5dfc3 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Neutron deleted interface 7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4; detaching it from the instance and deleting it from the info cache [ 1172.717092] env[69994]: DEBUG nova.network.neutron [req-10947cc2-83b0-4d15-a8ca-f8c729e90689 req-ddb8ab54-f4ee-431e-9782-35087cf5dfc3 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.727268] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.746598] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242676, 'name': PowerOffVM_Task, 'duration_secs': 0.231622} completed successfully. 
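The instance_info_cache update near the start of this trace carries the full Neutron network_info for port f7d5e758-a993-4a15-8bba-a695f99a96f4: fixed IP 192.168.128.9 with floating IP 10.180.180.226, MTU 8950, segmentation ID 820. A minimal sketch of walking that structure to recover the addresses; the dict literal is trimmed from the logged entry and the addresses() helper is illustrative, not part of Nova:

    # Illustrative only: pull fixed/floating IPs out of a Neutron network_info
    # entry shaped like the one logged by update_instance_cache_with_nw_info.
    network_info = [{
        "id": "f7d5e758-a993-4a15-8bba-a695f99a96f4",
        "address": "fa:16:3e:f7:92:8c",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.9",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.226", "type": "floating"}],
                }],
            }],
        },
    }]

    def addresses(vifs):
        """Yield (port_id, fixed_ip, floating_ips) tuples from a network_info list."""
        for vif in vifs:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip["type"] == "fixed":
                        floats = [f["address"] for f in ip.get("floating_ips", [])]
                        yield vif["id"], ip["address"], floats

    print(list(addresses(network_info)))
    # [('f7d5e758-a993-4a15-8bba-a695f99a96f4', '192.168.128.9', ['10.180.180.226'])]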
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.746871] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1172.752306] env[69994]: INFO nova.scheduler.client.report [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Deleted allocations for instance 4eb3724f-35ac-4f8d-a742-561b9c0333d8 [ 1172.815991] env[69994]: INFO nova.compute.manager [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Detaching volume 11b8e021-7578-46be-b87c-5051c0a1cb4d [ 1172.847287] env[69994]: INFO nova.virt.block_device [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Attempting to driver detach volume 11b8e021-7578-46be-b87c-5051c0a1cb4d from mountpoint /dev/sdb [ 1172.847526] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1172.847712] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1172.848625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16e1e57-e5fe-4eca-b17e-0f72707d8cd9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.873949] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c91903-124a-4b0e-9cfb-83408d55802c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.881607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bdf283-9086-495c-921c-82022ae30689 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.903123] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8565039-cde2-4790-ab04-5a7a487a9fd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.918492] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] The volume has not been displaced from its original location: [datastore2] volume-11b8e021-7578-46be-b87c-5051c0a1cb4d/volume-11b8e021-7578-46be-b87c-5051c0a1cb4d.vmdk. No consolidation needed. {{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1172.923812] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1172.924106] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc610d57-738e-4ed8-ac0e-3a773e95b292 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.947501] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1172.947501] env[69994]: value = "task-3242677" [ 1172.947501] env[69994]: _type = "Task" [ 1172.947501] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.957644] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242677, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.198779] env[69994]: DEBUG nova.network.neutron [-] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.216421] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1173.216585] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Cleaning up deleted instances with incomplete migration {{(pid=69994) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1173.219126] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c95be11b-5db4-41dd-83f5-93145e7034ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.229252] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bb038f-d783-4eb7-be09-dc1f7f7e8920 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.261019] env[69994]: DEBUG nova.compute.manager [req-10947cc2-83b0-4d15-a8ca-f8c729e90689 req-ddb8ab54-f4ee-431e-9782-35087cf5dfc3 service nova] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Detach interface failed, port_id=7df1cfb0-74f4-42d7-9dcf-e8c19410cfc4, reason: Instance 90e411dd-26f3-421d-b2d0-620c61fe8476 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1173.261546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68aa9fbb-1ab3-47f0-933c-52a4c3c07f88 tempest-ServerDiskConfigTestJSON-1237556642 tempest-ServerDiskConfigTestJSON-1237556642-project-member] Lock "4eb3724f-35ac-4f8d-a742-561b9c0333d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.630s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.457671] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242677, 'name': ReconfigVM_Task, 'duration_secs': 0.25299} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.457938] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1173.463856] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f94ce32-3676-4192-86cd-a14d74e35c21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.479308] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1173.479308] env[69994]: value = "task-3242678" [ 1173.479308] env[69994]: _type = "Task" [ 1173.479308] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.487279] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242678, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.701425] env[69994]: INFO nova.compute.manager [-] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Took 1.27 seconds to deallocate network for instance. [ 1173.990205] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242678, 'name': ReconfigVM_Task, 'duration_secs': 0.142663} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.990583] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1174.031157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c5cdbe-aa5c-4cef-a31f-5a9d53dde351 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.055377] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance 'eff21ec5-a51d-4004-9edf-1891f706fe9c' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1174.208195] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.208508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.208731] env[69994]: DEBUG nova.objects.instance [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lazy-loading 'resources' on Instance uuid 90e411dd-26f3-421d-b2d0-620c61fe8476 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.561726] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.561967] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0082dca2-abda-4cc4-882a-19ebc39ceeb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.571448] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc 
tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1174.571448] env[69994]: value = "task-3242679" [ 1174.571448] env[69994]: _type = "Task" [ 1174.571448] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.582156] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.718642] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.718642] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.827627] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8bb106-cfd4-4822-8702-ea1aa88aefdb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.837552] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2ef296-cc01-444a-9f0d-45a6f2ad3add {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.869202] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f618e7b0-db75-4b59-a452-05cc95cb3a12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.877936] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d00a8f5-3f39-4719-8264-a625b0a44ec5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.891502] env[69994]: DEBUG nova.compute.provider_tree [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.040595] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1175.040930] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b52e9e5-f480-4adc-93a9-f1ca6302f7d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.049499] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 
tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1175.049499] env[69994]: value = "task-3242680" [ 1175.049499] env[69994]: _type = "Task" [ 1175.049499] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.057427] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242680, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.080537] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242679, 'name': PowerOffVM_Task, 'duration_secs': 0.231561} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.080788] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1175.080971] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance 'eff21ec5-a51d-4004-9edf-1891f706fe9c' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1175.227690] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.227932] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.228078] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.228219] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1175.396071] env[69994]: DEBUG nova.scheduler.client.report [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1175.560671] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1175.560982] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1175.561192] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1175.561995] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df9d687-6f70-4644-84da-792e3e2659fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.582247] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0df8289-ad74-4237-9812-acfc6fd9cbe7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.586754] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1175.586979] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1175.587149] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1175.587502] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1175.587502] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1175.587664] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1175.587867] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1175.588033] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1175.588261] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1175.588353] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1175.589022] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1175.593509] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a1832b6-299f-4807-9d29-0beb185be5ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.609281] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1175.609586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1175.610397] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8aa677-1b14-428a-9580-1ea50be9a9e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.614227] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1175.614227] env[69994]: value = "task-3242681" [ 1175.614227] env[69994]: _type = "Task" [ 1175.614227] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.620244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1175.620674] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a321b23-0ccb-4abf-997e-6de86c11b8cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.624953] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242681, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.698060] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1175.698465] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1175.698774] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleting the datastore file [datastore1] be421d40-9859-4e0d-aef8-a2feb8717a78 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1175.699191] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08993813-8418-4ce8-a3a1-ad7d2ec05d35 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.706491] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1175.706491] env[69994]: value = "task-3242683" [ 1175.706491] env[69994]: _type = "Task" [ 1175.706491] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.715385] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.903716] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.931615] env[69994]: INFO nova.scheduler.client.report [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Deleted allocations for instance 90e411dd-26f3-421d-b2d0-620c61fe8476 [ 1176.125046] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242681, 'name': ReconfigVM_Task, 'duration_secs': 0.202989} completed successfully. 
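The lock bookkeeping lines in this trace ("Acquiring lock ... by ...", "acquired ... waited 0.001s", "released ... held 1.695s") are emitted by oslo.concurrency's lockutils wrapper around the named semaphore. A small sketch of the same pattern using lockutils directly, with placeholder bodies rather than Nova's resource tracker or cache-refresh code:

    from oslo_concurrency import lockutils

    # Serialize callers on the same in-process semaphore name seen in the log
    # ("compute_resources"); lockutils logs the acquire/wait/held timings itself.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass  # placeholder for the work done while the lock is held

    # The context-manager form covers ad-hoc critical sections, e.g. the
    # per-instance "refresh_cache-<uuid>" locks appearing in this trace.
    def refresh_cache(instance_uuid):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # placeholder: rebuild the instance_info_cache here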
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.125413] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance 'eff21ec5-a51d-4004-9edf-1891f706fe9c' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.218221] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163883} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.218221] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.218449] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1176.218513] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1176.438429] env[69994]: DEBUG oslo_concurrency.lockutils [None req-042813eb-bd84-4b3d-96f9-e28b5ffbd66a tempest-ImagesTestJSON-1478781762 tempest-ImagesTestJSON-1478781762-project-member] Lock "90e411dd-26f3-421d-b2d0-620c61fe8476" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.633s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.631750] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1176.631976] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 
{{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.632118] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1176.632343] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.632500] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1176.632649] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1176.632857] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1176.633188] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1176.633458] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1176.633667] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1176.633892] env[69994]: DEBUG nova.virt.hardware [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1176.639467] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1176.639826] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f104a06-7d07-4b42-9439-ab74a9d2d3f6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.661454] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1176.661454] env[69994]: value = "task-3242684" [ 1176.661454] env[69994]: _type = "Task" [ 1176.661454] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.670604] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242684, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.723067] env[69994]: INFO nova.virt.block_device [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Booting with volume 11b8e021-7578-46be-b87c-5051c0a1cb4d at /dev/sdb [ 1176.760502] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9161369c-530f-42b8-ad8d-1aebcc631ca1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.772564] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8177ae02-0655-4704-826e-a7b923c6ab28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.803695] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c742fe36-ea08-4a8b-a171-ca164950fa22 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.813587] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc06c243-828d-45b8-8f1d-1e855d310da9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.843199] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06941948-76c0-4ebf-8851-a02fba77b298 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.850566] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60e7f43-cd52-4763-b9e8-7e146c8df8ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.865250] env[69994]: DEBUG nova.virt.block_device [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updating existing volume attachment record: 1b2d9259-2553-4ee1-8241-0da9f87c386c {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1177.172755] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 
tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242684, 'name': ReconfigVM_Task, 'duration_secs': 0.188149} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.176101] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1177.176101] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdbc762-f3da-4689-82f8-d90367613383 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.205860] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] eff21ec5-a51d-4004-9edf-1891f706fe9c/eff21ec5-a51d-4004-9edf-1891f706fe9c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.206449] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48af4020-a2fe-42ee-be66-a67965610041 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.225874] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1177.225874] env[69994]: value = "task-3242685" [ 1177.225874] env[69994]: _type = "Task" [ 1177.225874] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.235260] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242685, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.738479] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242685, 'name': ReconfigVM_Task, 'duration_secs': 0.27056} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.738915] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Reconfigured VM instance instance-00000030 to attach disk [datastore2] eff21ec5-a51d-4004-9edf-1891f706fe9c/eff21ec5-a51d-4004-9edf-1891f706fe9c.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1177.739238] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance 'eff21ec5-a51d-4004-9edf-1891f706fe9c' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1178.246746] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c09870-3793-46da-bbc5-ff8a69ff626c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.271779] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71dbc301-abea-467c-8113-e4d60f293194 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.295421] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance 'eff21ec5-a51d-4004-9edf-1891f706fe9c' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1178.857282] env[69994]: DEBUG nova.network.neutron [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Port f7d5e758-a993-4a15-8bba-a695f99a96f4 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1178.995257] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.995517] env[69994]: DEBUG 
nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.995677] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.995872] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.996540] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.996754] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.996991] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.997192] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.997327] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.997494] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.997741] env[69994]: DEBUG nova.virt.hardware [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.998834] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac46efa1-37c0-46b6-9029-cbe97c7d5700 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.008286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be978973-dd27-4570-a354-11f58f314d41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.022822] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:fe:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba2c9555-1cfb-479b-b793-f20615723d77', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1179.031901] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1179.032273] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1179.032552] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84b0a4b7-3f09-4106-a1f5-9e8c4fb0535e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.054134] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1179.054134] env[69994]: value = "task-3242686" [ 1179.054134] env[69994]: _type = "Task" [ 1179.054134] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.062811] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242686, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.566959] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242686, 'name': CreateVM_Task, 'duration_secs': 0.359826} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.566959] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1179.567543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.567650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.567948] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1179.568661] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a2d0a8d-8064-4a4b-ad0a-7c117a597ef5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.574504] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1179.574504] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5288e152-9200-62a6-cd1e-2d148bcf3058" [ 1179.574504] env[69994]: _type = "Task" [ 1179.574504] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.583641] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5288e152-9200-62a6-cd1e-2d148bcf3058, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.892692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.893195] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.893195] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.087603] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5288e152-9200-62a6-cd1e-2d148bcf3058, 'name': SearchDatastore_Task, 'duration_secs': 0.010383} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.087971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.088843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1180.088843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.088843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.089106] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1180.089303] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a24f60a0-e69d-406a-a245-3ca1f4bdc17c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.102265] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1180.102487] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1180.103262] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e756a364-9b18-4ff7-856f-3a3896beea97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.110053] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1180.110053] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e03efb-31cd-a042-2e69-13be9cdcab38" [ 1180.110053] env[69994]: _type = "Task" [ 1180.110053] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.119332] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e03efb-31cd-a042-2e69-13be9cdcab38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.621726] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e03efb-31cd-a042-2e69-13be9cdcab38, 'name': SearchDatastore_Task, 'duration_secs': 0.02645} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.622593] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e2af004-f4c1-4350-8b99-fc6e9b6fc049 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.634262] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1180.634262] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528f1a1e-1fe9-18db-74a2-020f705b7fc4" [ 1180.634262] env[69994]: _type = "Task" [ 1180.634262] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.646853] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528f1a1e-1fe9-18db-74a2-020f705b7fc4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.928759] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.929092] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.929395] env[69994]: DEBUG nova.network.neutron [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.145030] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528f1a1e-1fe9-18db-74a2-020f705b7fc4, 'name': SearchDatastore_Task, 'duration_secs': 0.011566} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.145319] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.145578] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1181.145832] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53d952ad-a9cb-45e6-a2a1-0b29d628e785 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.153479] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1181.153479] env[69994]: value = "task-3242687" [ 1181.153479] env[69994]: _type = "Task" [ 1181.153479] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.161662] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242687, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.664729] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242687, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459185} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.665124] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1181.665201] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1181.665456] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd21b1c1-3716-4f53-b204-c92755411836 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.674635] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1181.674635] env[69994]: value = "task-3242688" [ 1181.674635] env[69994]: _type = "Task" [ 1181.674635] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.680369] env[69994]: DEBUG nova.network.neutron [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.684810] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242688, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.186821] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242688, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066528} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.186821] env[69994]: DEBUG oslo_concurrency.lockutils [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.192090] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1182.192090] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd36d680-0ebc-4f41-a196-6a4adbcf29e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.218193] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1182.218862] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac1cebf6-168b-43dd-bf16-9b9f50eb2b5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.243036] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1182.243036] env[69994]: value = "task-3242689" [ 1182.243036] env[69994]: _type = "Task" [ 1182.243036] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.253283] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242689, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.717769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcd9833-a46e-429e-852e-4722eca200ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.738360] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba19d42-0559-49bc-ae83-fe02aa8d672b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.748173] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance 'eff21ec5-a51d-4004-9edf-1891f706fe9c' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1182.757440] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242689, 'name': ReconfigVM_Task, 'duration_secs': 0.2938} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.757762] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfigured VM instance instance-00000064 to attach disk [datastore2] be421d40-9859-4e0d-aef8-a2feb8717a78/be421d40-9859-4e0d-aef8-a2feb8717a78.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1182.759415] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'guest_format': None, 'device_name': '/dev/sda', 'encryption_format': None, 'size': 0, 'device_type': 'disk', 'encryption_options': None, 'encrypted': False, 'image_id': 'cc2e14cc-b12f-480a-a387-dd21e9efda8b'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '1b2d9259-2553-4ee1-8241-0da9f87c386c', 'disk_bus': None, 'guest_format': None, 'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'}, 'mount_device': '/dev/sdb', 'boot_index': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1182.759665] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1182.759886] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1182.760718] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e38ede-90e7-4f6f-b36e-6a801ecbde08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.777792] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febf778a-9df4-4838-b890-dda82786f550 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.803836] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] volume-11b8e021-7578-46be-b87c-5051c0a1cb4d/volume-11b8e021-7578-46be-b87c-5051c0a1cb4d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1182.804109] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bcea8a2-f2f1-41da-8cb6-238370cc5c17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.827139] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1182.827139] env[69994]: value = "task-3242690" [ 1182.827139] env[69994]: _type = "Task" [ 1182.827139] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.836782] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242690, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.907483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.907483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.907655] env[69994]: INFO nova.compute.manager [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Shelving [ 1183.256019] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1183.256019] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd351aa6-63ab-40f3-b827-a8c7f5728605 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.262560] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1183.262560] env[69994]: value = "task-3242691" [ 1183.262560] env[69994]: _type = "Task" [ 1183.262560] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.273760] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.336853] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242690, 'name': ReconfigVM_Task, 'duration_secs': 0.3282} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.337150] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfigured VM instance instance-00000064 to attach disk [datastore2] volume-11b8e021-7578-46be-b87c-5051c0a1cb4d/volume-11b8e021-7578-46be-b87c-5051c0a1cb4d.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1183.342382] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ee2e648-938f-40b2-80ff-b01ce7c33891 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.357894] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1183.357894] env[69994]: value = "task-3242692" [ 1183.357894] env[69994]: _type = "Task" [ 1183.357894] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.366040] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242692, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.774021] env[69994]: DEBUG oslo_vmware.api [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242691, 'name': PowerOnVM_Task, 'duration_secs': 0.389454} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.774021] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.774426] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-19d893c2-8816-460a-a16c-9e94aa0660bc tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance 'eff21ec5-a51d-4004-9edf-1891f706fe9c' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1183.866900] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242692, 'name': ReconfigVM_Task, 'duration_secs': 0.141582} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.867239] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1183.868584] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31fa13ba-6f80-4800-ba32-a1c96b81c3bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.874390] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.874622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.878663] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1183.878663] env[69994]: value = "task-3242693" [ 1183.878663] env[69994]: _type = "Task" [ 1183.878663] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.887209] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242693, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.918319] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1183.918634] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03e6597d-fa45-41b9-8da7-cbe32d28e8c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.925014] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1183.925014] env[69994]: value = "task-3242694" [ 1183.925014] env[69994]: _type = "Task" [ 1183.925014] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.935932] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242694, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.376973] env[69994]: DEBUG nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1184.389848] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242693, 'name': Rename_Task, 'duration_secs': 0.183368} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.390126] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1184.390372] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ea40b3d-8d0c-41bc-9a24-17febe551510 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.396648] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1184.396648] env[69994]: value = "task-3242695" [ 1184.396648] env[69994]: _type = "Task" [ 1184.396648] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.403897] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242695, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.436228] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242694, 'name': PowerOffVM_Task, 'duration_secs': 0.176953} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.436574] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1184.437422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63553d9-b736-4cd2-9934-7a0d4182c874 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.455657] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8117ce93-e675-4df6-a91f-395539127066 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.909283] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242695, 'name': PowerOnVM_Task} progress is 85%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.914476] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.914795] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.916479] env[69994]: INFO nova.compute.claims [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1184.966287] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1184.966540] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8462c26f-0b02-4006-ab91-c0eaeaeacfdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.975614] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1184.975614] env[69994]: value = "task-3242696" [ 1184.975614] env[69994]: _type = "Task" [ 1184.975614] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.983879] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242696, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.407544] env[69994]: DEBUG oslo_vmware.api [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242695, 'name': PowerOnVM_Task, 'duration_secs': 0.732653} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.407997] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1185.408191] env[69994]: DEBUG nova.compute.manager [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1185.408959] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25a6f0e-de9b-4c68-b2f1-4dc2dfacf2d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.484786] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242696, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.695105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.695428] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.695645] env[69994]: DEBUG nova.compute.manager [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Going to confirm migration 8 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1185.928480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.986924] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242696, 'name': CreateSnapshot_Task, 'duration_secs': 0.567472} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.989403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1185.990454] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2383c17e-5ecd-41d4-a966-d954aaa00956 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.034505] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae3baca-6b6a-4cf0-931c-22d1043c18af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.042861] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197bd22b-0be9-44b3-a3b3-6960114a1766 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.075499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba1b5c8-6d7e-4b04-bfb8-e87e78ff72d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.083307] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b663643f-24bc-494a-9ee4-6442b051be34 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.097189] env[69994]: DEBUG nova.compute.provider_tree [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.262203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.262457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.262636] env[69994]: DEBUG nova.network.neutron [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1186.262816] env[69994]: DEBUG nova.objects.instance [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] 
Lazy-loading 'info_cache' on Instance uuid eff21ec5-a51d-4004-9edf-1891f706fe9c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1186.510431] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1186.510790] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6fde9ed5-98d6-4e7f-8de9-57e8f78f1069 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.520339] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1186.520339] env[69994]: value = "task-3242697" [ 1186.520339] env[69994]: _type = "Task" [ 1186.520339] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.528545] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242697, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.600323] env[69994]: DEBUG nova.scheduler.client.report [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1187.030396] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242697, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.105883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.106390] env[69994]: DEBUG nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1187.109071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.181s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.109257] env[69994]: DEBUG nova.objects.instance [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69994) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1187.255616] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.255918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.256130] env[69994]: INFO nova.compute.manager [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Shelving [ 1187.474249] env[69994]: DEBUG nova.network.neutron [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [{"id": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "address": "fa:16:3e:f7:92:8c", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7d5e758-a9", "ovs_interfaceid": "f7d5e758-a993-4a15-8bba-a695f99a96f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.531056] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242697, 'name': CloneVM_Task} progress is 95%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.613252] env[69994]: DEBUG nova.compute.utils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1187.617517] env[69994]: DEBUG nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1187.617695] env[69994]: DEBUG nova.network.neutron [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1187.656353] env[69994]: DEBUG nova.policy [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1187.942598] env[69994]: DEBUG nova.network.neutron [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Successfully created port: 19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1187.977459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eff21ec5-a51d-4004-9edf-1891f706fe9c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.977714] env[69994]: DEBUG nova.objects.instance [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'migration_context' on Instance uuid eff21ec5-a51d-4004-9edf-1891f706fe9c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.033528] env[69994]: DEBUG oslo_vmware.api [None 
req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242697, 'name': CloneVM_Task, 'duration_secs': 1.319175} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.033870] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Created linked-clone VM from snapshot [ 1188.034530] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6ea024-2529-48dc-b598-46fd4db8542b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.041938] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Uploading image c19374ad-1dca-4b03-a7af-507e74541e09 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1188.067192] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1188.067192] env[69994]: value = "vm-648032" [ 1188.067192] env[69994]: _type = "VirtualMachine" [ 1188.067192] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1188.067464] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e701d5b1-b7b5-482c-955f-089924e45903 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.074567] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease: (returnval){ [ 1188.074567] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5269a04b-1d52-4c4e-cda3-68b3c51180d5" [ 1188.074567] env[69994]: _type = "HttpNfcLease" [ 1188.074567] env[69994]: } obtained for exporting VM: (result){ [ 1188.074567] env[69994]: value = "vm-648032" [ 1188.074567] env[69994]: _type = "VirtualMachine" [ 1188.074567] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1188.074882] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the lease: (returnval){ [ 1188.074882] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5269a04b-1d52-4c4e-cda3-68b3c51180d5" [ 1188.074882] env[69994]: _type = "HttpNfcLease" [ 1188.074882] env[69994]: } to be ready. 
{{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1188.081308] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1188.081308] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5269a04b-1d52-4c4e-cda3-68b3c51180d5" [ 1188.081308] env[69994]: _type = "HttpNfcLease" [ 1188.081308] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1188.118479] env[69994]: DEBUG nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1188.121959] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d225b35a-7bdf-4d3a-b9d4-0075378f9278 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.265321] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1188.265662] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e611b8c7-d177-4058-87bf-0875a7e985d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.273936] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1188.273936] env[69994]: value = "task-3242699" [ 1188.273936] env[69994]: _type = "Task" [ 1188.273936] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.281688] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242699, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.481011] env[69994]: DEBUG nova.objects.base [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1188.482029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c09dcf-517a-4134-a561-84160f596cf3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.510277] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11c1b7ad-992a-47f7-9a15-1d6a278ae51b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.517375] env[69994]: DEBUG oslo_vmware.api [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1188.517375] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52648e9c-7fbc-64c0-917d-061429525578" [ 1188.517375] env[69994]: _type = "Task" [ 1188.517375] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.527324] env[69994]: DEBUG oslo_vmware.api [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52648e9c-7fbc-64c0-917d-061429525578, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.582720] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1188.582720] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5269a04b-1d52-4c4e-cda3-68b3c51180d5" [ 1188.582720] env[69994]: _type = "HttpNfcLease" [ 1188.582720] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1188.583019] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1188.583019] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5269a04b-1d52-4c4e-cda3-68b3c51180d5" [ 1188.583019] env[69994]: _type = "HttpNfcLease" [ 1188.583019] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1188.583725] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c554051-3ef7-476c-a2aa-d7f0dee48afa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.590476] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe7a28-2a15-d033-8b62-52ebea83a60e/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1188.590643] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe7a28-2a15-d033-8b62-52ebea83a60e/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1188.783697] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242699, 'name': PowerOffVM_Task, 'duration_secs': 0.23783} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.783953] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1188.784726] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7536ba53-48c9-4e10-8953-05c45d0b2363 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.803914] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe2ecde-4763-4aab-a5c0-b225effb8468 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.806525] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-abd85e4a-0171-4a7b-88d0-99ee532cb852 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.028212] env[69994]: DEBUG oslo_vmware.api [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52648e9c-7fbc-64c0-917d-061429525578, 'name': SearchDatastore_Task, 'duration_secs': 0.012052} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.028705] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.029100] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.155141] env[69994]: DEBUG nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1189.180055] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1189.180516] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1189.180834] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1189.181163] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1189.181412] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] 
Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1189.181584] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1189.181901] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1189.182162] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1189.182449] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1189.182731] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1189.182912] env[69994]: DEBUG nova.virt.hardware [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1189.183943] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ed1205-d62a-4fc1-9e1b-17a02e5c105f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.192405] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9f55e5-0b42-4dfe-a675-e493699975b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.309023] env[69994]: DEBUG nova.compute.manager [req-ab101c79-f75f-4435-ace7-29a393d0c9ee req-0581c6bb-ee24-4216-a03c-eeb34a9cce35 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received event network-vif-plugged-19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.309310] env[69994]: DEBUG oslo_concurrency.lockutils [req-ab101c79-f75f-4435-ace7-29a393d0c9ee req-0581c6bb-ee24-4216-a03c-eeb34a9cce35 service nova] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.309694] env[69994]: DEBUG oslo_concurrency.lockutils [req-ab101c79-f75f-4435-ace7-29a393d0c9ee req-0581c6bb-ee24-4216-a03c-eeb34a9cce35 service nova] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.309785] env[69994]: DEBUG oslo_concurrency.lockutils [req-ab101c79-f75f-4435-ace7-29a393d0c9ee req-0581c6bb-ee24-4216-a03c-eeb34a9cce35 service nova] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.309992] env[69994]: DEBUG nova.compute.manager [req-ab101c79-f75f-4435-ace7-29a393d0c9ee req-0581c6bb-ee24-4216-a03c-eeb34a9cce35 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] No waiting events found dispatching network-vif-plugged-19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1189.310502] env[69994]: WARNING nova.compute.manager [req-ab101c79-f75f-4435-ace7-29a393d0c9ee req-0581c6bb-ee24-4216-a03c-eeb34a9cce35 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received unexpected event network-vif-plugged-19e03de0-48c7-4499-a84b-4e2bf08e38e7 for instance with vm_state building and task_state spawning. [ 1189.319639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1189.320132] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-742478e7-b9b9-4d52-84c4-1ed5ae3e0fc4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.329070] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1189.329070] env[69994]: value = "task-3242700" [ 1189.329070] env[69994]: _type = "Task" [ 1189.329070] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.338900] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242700, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.404040] env[69994]: DEBUG nova.network.neutron [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Successfully updated port: 19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1189.650221] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7551e012-11ff-497f-a263-b301d281d2bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.658568] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a18528-a104-450c-9412-acdf13521660 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.693998] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91df29dc-e738-4fa4-8eab-2303c4811d5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.702204] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a32ece-8270-4292-aa59-75f3b26d0173 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.716691] env[69994]: DEBUG nova.compute.provider_tree [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.843940] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242700, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.906333] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.906510] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.906663] env[69994]: DEBUG nova.network.neutron [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.220389] env[69994]: DEBUG nova.scheduler.client.report [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.340137] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242700, 'name': CreateSnapshot_Task, 'duration_secs': 0.793191} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.340681] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1190.341655] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d005eee3-a78c-49d7-ab21-ee3d9449756c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.439365] env[69994]: DEBUG nova.network.neutron [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1190.566344] env[69994]: DEBUG nova.network.neutron [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.860199] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1190.860586] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-450d19da-9eb1-4329-b5ec-41cd4336d31d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.870347] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1190.870347] env[69994]: value = "task-3242701" [ 1190.870347] env[69994]: _type = "Task" [ 1190.870347] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.879641] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242701, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.069453] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.069823] env[69994]: DEBUG nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Instance network_info: |[{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1191.070379] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:da:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19e03de0-48c7-4499-a84b-4e2bf08e38e7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.078330] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Creating folder: Project (70d05b502dfd4c5282872339c1e34d0c). Parent ref: group-v647729. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1191.078651] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7c05111-8347-485e-9837-4cf4be944316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.089049] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Created folder: Project (70d05b502dfd4c5282872339c1e34d0c) in parent group-v647729. [ 1191.089300] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Creating folder: Instances. Parent ref: group-v648035. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1191.089571] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee2761f3-60a0-4bb4-854d-83817adb6c8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.099880] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Created folder: Instances in parent group-v648035. [ 1191.100177] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1191.100388] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1191.100620] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0467ef6-8923-4df3-8ed9-d44418f31407 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.120778] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.120778] env[69994]: value = "task-3242704" [ 1191.120778] env[69994]: _type = "Task" [ 1191.120778] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.130599] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242704, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.231565] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.202s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.340469] env[69994]: DEBUG nova.compute.manager [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received event network-changed-19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1191.340626] env[69994]: DEBUG nova.compute.manager [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Refreshing instance network info cache due to event network-changed-19e03de0-48c7-4499-a84b-4e2bf08e38e7. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1191.340845] env[69994]: DEBUG oslo_concurrency.lockutils [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] Acquiring lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.340986] env[69994]: DEBUG oslo_concurrency.lockutils [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] Acquired lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.341165] env[69994]: DEBUG nova.network.neutron [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Refreshing network info cache for port 19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.379613] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242701, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.631930] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242704, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.795407] env[69994]: INFO nova.scheduler.client.report [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted allocation for migration 1f68e833-4694-4d0b-a2dd-8092109b9cfc [ 1191.881626] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242701, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.070607] env[69994]: DEBUG nova.network.neutron [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updated VIF entry in instance network info cache for port 19e03de0-48c7-4499-a84b-4e2bf08e38e7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.070967] env[69994]: DEBUG nova.network.neutron [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.132031] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242704, 'name': CreateVM_Task, 'duration_secs': 0.554877} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.132178] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1192.132983] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.133192] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.133628] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1192.133941] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8f8480f-b494-44e9-aa38-6acf9e1f0e40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.138767] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1192.138767] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5200b96a-426b-5524-6d3b-9354771db408" [ 1192.138767] env[69994]: _type = "Task" [ 1192.138767] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.146626] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5200b96a-426b-5524-6d3b-9354771db408, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.301579] env[69994]: DEBUG oslo_concurrency.lockutils [None req-95792f52-85fa-4065-954f-0092a303b905 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.606s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.381848] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242701, 'name': CloneVM_Task, 'duration_secs': 1.178232} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.381848] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Created linked-clone VM from snapshot [ 1192.382222] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043abef0-bf57-4577-8107-18e344cf41f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.389631] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Uploading image 4ad28a5f-4da1-4989-b13a-f34046f115ea {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1192.411752] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1192.411752] env[69994]: value = "vm-648034" [ 1192.411752] env[69994]: _type = "VirtualMachine" [ 1192.411752] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1192.412033] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d24383d4-1591-422b-b7a4-9573dc201f0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.418827] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease: (returnval){ [ 1192.418827] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527da1a6-bc58-e04c-a23e-916ab78aab08" [ 1192.418827] env[69994]: _type = "HttpNfcLease" [ 1192.418827] env[69994]: } obtained for exporting VM: (result){ [ 1192.418827] env[69994]: value = "vm-648034" [ 1192.418827] env[69994]: _type = "VirtualMachine" [ 1192.418827] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1192.419064] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the lease: (returnval){ [ 1192.419064] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527da1a6-bc58-e04c-a23e-916ab78aab08" [ 1192.419064] env[69994]: _type = "HttpNfcLease" [ 1192.419064] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1192.425634] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1192.425634] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527da1a6-bc58-e04c-a23e-916ab78aab08" [ 1192.425634] env[69994]: _type = "HttpNfcLease" [ 1192.425634] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1192.574239] env[69994]: DEBUG oslo_concurrency.lockutils [req-4292102e-e075-4a13-839c-aff360f6d1b2 req-ae17af6f-394f-4b51-ad89-ca0bad5c1ee0 service nova] Releasing lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.650735] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5200b96a-426b-5524-6d3b-9354771db408, 'name': SearchDatastore_Task, 'duration_secs': 0.048308} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.651041] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.651274] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.651528] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.651706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.651888] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.652141] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fbdc9ab-983f-4ad8-afd7-c3f36a9b3989 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.659934] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.660119] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1192.660880] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ae08d7-f35e-470d-80f8-aeab7f2699a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.665940] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1192.665940] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520fd45a-a8bd-94d9-d3e2-2684585a297b" [ 1192.665940] env[69994]: _type = "Task" [ 1192.665940] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.673081] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520fd45a-a8bd-94d9-d3e2-2684585a297b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.822413] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.822694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.822906] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.823126] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.823360] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.825651] env[69994]: INFO nova.compute.manager [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Terminating instance [ 1192.927714] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1192.927714] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527da1a6-bc58-e04c-a23e-916ab78aab08" [ 1192.927714] env[69994]: _type = "HttpNfcLease" [ 1192.927714] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1192.928120] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1192.928120] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527da1a6-bc58-e04c-a23e-916ab78aab08" [ 1192.928120] env[69994]: _type = "HttpNfcLease" [ 1192.928120] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1192.928888] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee26aa0a-b311-46c6-b35a-48144bc5642b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.936711] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf495b-e877-8c36-99b4-0b78caf71efa/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1192.936897] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf495b-e877-8c36-99b4-0b78caf71efa/disk-0.vmdk for reading. {{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1193.030452] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-45273b9a-700d-456e-ad50-58d38a3aa659 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.177020] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520fd45a-a8bd-94d9-d3e2-2684585a297b, 'name': SearchDatastore_Task, 'duration_secs': 0.008344} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.177887] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2023f20f-ec26-4ae6-b7db-9f6468f4dac0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.183244] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1193.183244] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c74c65-65de-badb-369e-af5a9dfd129d" [ 1193.183244] env[69994]: _type = "Task" [ 1193.183244] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.190961] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c74c65-65de-badb-369e-af5a9dfd129d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.329022] env[69994]: DEBUG nova.compute.manager [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1193.329388] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1193.330265] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a88c07-bd87-46ac-9025-d938b4128c3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.337888] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.338181] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cff2b891-4814-47a6-82d0-11228d2b532b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.344718] env[69994]: DEBUG oslo_vmware.api [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1193.344718] env[69994]: value = "task-3242706" [ 1193.344718] env[69994]: _type = "Task" [ 1193.344718] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.353222] env[69994]: DEBUG oslo_vmware.api [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.693804] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c74c65-65de-badb-369e-af5a9dfd129d, 'name': SearchDatastore_Task, 'duration_secs': 0.01059} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.694404] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.694404] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 70fcf5b1-213f-4ff9-b675-282e7aa30e20/70fcf5b1-213f-4ff9-b675-282e7aa30e20.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1193.694404] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd5b567a-8225-48de-8da2-d50d020a7d00 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.702608] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1193.702608] env[69994]: value = "task-3242707" [ 1193.702608] env[69994]: _type = "Task" [ 1193.702608] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.710052] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242707, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.854041] env[69994]: DEBUG oslo_vmware.api [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242706, 'name': PowerOffVM_Task, 'duration_secs': 0.227598} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.854410] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.854659] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1193.854963] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0bb2dd9-58af-4a57-a141-54dcd6aa7c2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.960437] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.960793] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.961068] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleting the datastore file [datastore2] eff21ec5-a51d-4004-9edf-1891f706fe9c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.961534] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05c8beca-60f0-4862-948e-8c2bd1a65390 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.973033] env[69994]: DEBUG oslo_vmware.api [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1193.973033] env[69994]: value = "task-3242709" [ 1193.973033] env[69994]: _type = "Task" [ 1193.973033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.981588] env[69994]: DEBUG oslo_vmware.api [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242709, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.211708] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242707, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.483401] env[69994]: DEBUG oslo_vmware.api [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.373223} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.483795] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.483795] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1194.483995] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1194.484186] env[69994]: INFO nova.compute.manager [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1194.484446] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1194.484768] env[69994]: DEBUG nova.compute.manager [-] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1194.484956] env[69994]: DEBUG nova.network.neutron [-] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1194.714306] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242707, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533095} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.715118] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 70fcf5b1-213f-4ff9-b675-282e7aa30e20/70fcf5b1-213f-4ff9-b675-282e7aa30e20.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1194.715118] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1194.716022] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a74937c3-6689-4a4b-a13a-277143b7a284 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.723033] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1194.723033] env[69994]: value = "task-3242710" [ 1194.723033] env[69994]: _type = "Task" [ 1194.723033] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.737966] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242710, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.006887] env[69994]: DEBUG nova.compute.manager [req-48b4e9a4-e14c-44ca-8b1c-1648aca03f71 req-aabc5a23-3bb3-4fa3-8f8c-ac37330604b7 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Received event network-vif-deleted-f7d5e758-a993-4a15-8bba-a695f99a96f4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.007143] env[69994]: INFO nova.compute.manager [req-48b4e9a4-e14c-44ca-8b1c-1648aca03f71 req-aabc5a23-3bb3-4fa3-8f8c-ac37330604b7 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Neutron deleted interface f7d5e758-a993-4a15-8bba-a695f99a96f4; detaching it from the instance and deleting it from the info cache [ 1195.007304] env[69994]: DEBUG nova.network.neutron [req-48b4e9a4-e14c-44ca-8b1c-1648aca03f71 req-aabc5a23-3bb3-4fa3-8f8c-ac37330604b7 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.235693] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100922} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.235693] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1195.236868] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad128949-4d38-441b-9bbd-ecde6e218730 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.262046] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 70fcf5b1-213f-4ff9-b675-282e7aa30e20/70fcf5b1-213f-4ff9-b675-282e7aa30e20.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1195.262046] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6cc19f8-cc21-4f23-b707-7b8321541fb6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.288780] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1195.288780] env[69994]: value = "task-3242711" [ 1195.288780] env[69994]: _type = "Task" [ 1195.288780] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.297068] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242711, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.488220] env[69994]: DEBUG nova.network.neutron [-] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.509915] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-583816c7-240c-4cca-86dd-88476b2eec81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.519604] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf48e8a5-0b66-4ad4-aebd-5c460378f79f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.550476] env[69994]: DEBUG nova.compute.manager [req-48b4e9a4-e14c-44ca-8b1c-1648aca03f71 req-aabc5a23-3bb3-4fa3-8f8c-ac37330604b7 service nova] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Detach interface failed, port_id=f7d5e758-a993-4a15-8bba-a695f99a96f4, reason: Instance eff21ec5-a51d-4004-9edf-1891f706fe9c could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1195.798086] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242711, 'name': ReconfigVM_Task, 'duration_secs': 0.374799} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.798400] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 70fcf5b1-213f-4ff9-b675-282e7aa30e20/70fcf5b1-213f-4ff9-b675-282e7aa30e20.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1195.799110] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6fb179c3-b471-4855-801e-31e1958a9568 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.806508] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1195.806508] env[69994]: value = "task-3242712" [ 1195.806508] env[69994]: _type = "Task" [ 1195.806508] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.816600] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242712, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.929066] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.991484] env[69994]: INFO nova.compute.manager [-] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Took 1.51 seconds to deallocate network for instance. [ 1196.317557] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242712, 'name': Rename_Task, 'duration_secs': 0.228905} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.317871] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1196.318145] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5ea6c98-c628-4cbb-bdbe-7e914d6767e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.325058] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1196.325058] env[69994]: value = "task-3242713" [ 1196.325058] env[69994]: _type = "Task" [ 1196.325058] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.337340] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.433243] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Getting list of instances from cluster (obj){ [ 1196.433243] env[69994]: value = "domain-c8" [ 1196.433243] env[69994]: _type = "ClusterComputeResource" [ 1196.433243] env[69994]: } {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1196.434363] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e874dd8-71df-4220-a4f2-a93792f50151 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.447689] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Got total of 5 instances {{(pid=69994) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1196.447856] env[69994]: WARNING nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] While synchronizing instance power states, found 6 instances in the database and 5 instances on the hypervisor. 
[ 1196.447998] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Triggering sync for uuid eff21ec5-a51d-4004-9edf-1891f706fe9c {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1196.448227] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Triggering sync for uuid ef37ce64-2c26-4080-899a-6d9dbb5850c9 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1196.448379] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Triggering sync for uuid be421d40-9859-4e0d-aef8-a2feb8717a78 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1196.448532] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Triggering sync for uuid 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1196.448684] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Triggering sync for uuid d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1196.448832] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Triggering sync for uuid 70fcf5b1-213f-4ff9-b675-282e7aa30e20 {{(pid=69994) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1196.449201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.449448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.449642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.449911] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.450108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.450351] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.450561] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.450766] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.451634] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80e8860-4dbf-498a-9a66-f54e4aa4ee5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.454986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303bbc63-e09e-4fb8-979e-1b4f09781004 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.498267] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.498635] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.498799] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.527509] env[69994]: INFO nova.scheduler.client.report [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted allocations for instance eff21ec5-a51d-4004-9edf-1891f706fe9c [ 1196.836326] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242713, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.966507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.517s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.966973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.517s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.034468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-992f36eb-f35b-4b71-9676-f3c2d75a6967 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.212s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.035466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.586s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.035741] env[69994]: INFO nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] During sync_power_state the instance has a pending task (deleting). Skip. [ 1197.035968] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "eff21ec5-a51d-4004-9edf-1891f706fe9c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.038256] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe7a28-2a15-d033-8b62-52ebea83a60e/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1197.039274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a13b3e-0dc7-433b-82fd-ecf302406bb9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.046455] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe7a28-2a15-d033-8b62-52ebea83a60e/disk-0.vmdk is in state: ready. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1197.046626] env[69994]: ERROR oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe7a28-2a15-d033-8b62-52ebea83a60e/disk-0.vmdk due to incomplete transfer. [ 1197.046863] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7fb320be-814e-43e9-b570-900e94f30634 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.055786] env[69994]: DEBUG oslo_vmware.rw_handles [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fe7a28-2a15-d033-8b62-52ebea83a60e/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1197.055995] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Uploaded image c19374ad-1dca-4b03-a7af-507e74541e09 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1197.058610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1197.059126] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e776f4e6-6e74-4b7c-8096-14cf8ed5f96d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.065127] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1197.065127] env[69994]: value = "task-3242714" [ 1197.065127] env[69994]: _type = "Task" [ 1197.065127] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.073999] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242714, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.335681] env[69994]: DEBUG oslo_vmware.api [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242713, 'name': PowerOnVM_Task, 'duration_secs': 0.560429} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.335681] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.335904] env[69994]: INFO nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Took 8.18 seconds to spawn the instance on the hypervisor. [ 1197.336079] env[69994]: DEBUG nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1197.336838] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4880fd-4d03-43ac-bce5-ade060744025 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.578577] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242714, 'name': Destroy_Task, 'duration_secs': 0.326284} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.578959] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Destroyed the VM [ 1197.579167] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1197.579425] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-da427b54-7ec5-4d3c-80fb-c7d257cf9e47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.586081] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1197.586081] env[69994]: value = "task-3242715" [ 1197.586081] env[69994]: _type = "Task" [ 1197.586081] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.593767] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242715, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.853533] env[69994]: INFO nova.compute.manager [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Took 12.97 seconds to build instance. [ 1197.946637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eea243fb-97fc-4c65-8699-1b3c321bd250" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.946884] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.095840] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242715, 'name': RemoveSnapshot_Task, 'duration_secs': 0.362823} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.096121] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1198.096386] env[69994]: DEBUG nova.compute.manager [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1198.097159] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ce9022-fc5d-4493-9996-76bd134d340d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.236447] env[69994]: DEBUG nova.compute.manager [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received event network-changed-19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1198.236680] env[69994]: DEBUG nova.compute.manager [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Refreshing instance network info cache due to event network-changed-19e03de0-48c7-4499-a84b-4e2bf08e38e7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1198.236868] env[69994]: DEBUG oslo_concurrency.lockutils [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] Acquiring lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.237018] env[69994]: DEBUG oslo_concurrency.lockutils [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] Acquired lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.237184] env[69994]: DEBUG nova.network.neutron [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Refreshing network info cache for port 19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1198.355987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-82d8322a-eb4c-435f-b7b0-ef969007b772 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.481s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.356304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.905s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.356497] env[69994]: INFO nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] During sync_power_state the instance has a pending task (spawning). Skip. [ 1198.356671] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.449199] env[69994]: DEBUG nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1198.608541] env[69994]: INFO nova.compute.manager [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Shelve offloading [ 1198.971015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.971331] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.972908] env[69994]: INFO nova.compute.claims [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1198.980106] env[69994]: DEBUG nova.network.neutron [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updated VIF entry in instance network info cache for port 19e03de0-48c7-4499-a84b-4e2bf08e38e7. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1198.980463] env[69994]: DEBUG nova.network.neutron [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.112546] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1199.112853] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f8fb7e8-1d49-4979-bdab-8fed43c6fb2d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.120644] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1199.120644] env[69994]: value = "task-3242716" [ 1199.120644] env[69994]: _type = "Task" [ 1199.120644] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.128436] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242716, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.483386] env[69994]: DEBUG oslo_concurrency.lockutils [req-7932a82b-4e29-424b-a808-e0a027bd058c req-b9455611-c739-4aef-9929-fa3b49beb572 service nova] Releasing lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.633986] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1199.634391] env[69994]: DEBUG nova.compute.manager [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1199.635139] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361fc438-bf4d-4a4c-b1d0-a7cf45ac0600 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.641575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.641781] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.641972] env[69994]: DEBUG nova.network.neutron [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.065894] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567af8ab-a8d4-4120-b39e-d193a2530962 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.073655] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d6edf4-e55a-49b0-ae51-c0e5df937cd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.104286] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9cf9ab-a076-4679-bf01-d9fa747367bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.111509] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c9976cc5-97c1-4830-b73b-f774bb2fb8a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.124543] env[69994]: DEBUG nova.compute.provider_tree [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.414786] env[69994]: DEBUG nova.network.neutron [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc354355-eb", "ovs_interfaceid": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.629123] env[69994]: DEBUG nova.scheduler.client.report [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.917949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.133325] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.162s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.134041] env[69994]: DEBUG nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1201.166552] env[69994]: DEBUG nova.compute.manager [req-2242a146-6ca8-41bf-a761-d109620d0fd4 req-fb62fec1-8f4e-4e7d-8271-6ffb10e8afe4 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-vif-unplugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1201.167667] env[69994]: DEBUG oslo_concurrency.lockutils [req-2242a146-6ca8-41bf-a761-d109620d0fd4 req-fb62fec1-8f4e-4e7d-8271-6ffb10e8afe4 service nova] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.167667] env[69994]: DEBUG oslo_concurrency.lockutils [req-2242a146-6ca8-41bf-a761-d109620d0fd4 req-fb62fec1-8f4e-4e7d-8271-6ffb10e8afe4 service nova] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.167667] env[69994]: DEBUG oslo_concurrency.lockutils [req-2242a146-6ca8-41bf-a761-d109620d0fd4 req-fb62fec1-8f4e-4e7d-8271-6ffb10e8afe4 service nova] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.167667] env[69994]: DEBUG nova.compute.manager [req-2242a146-6ca8-41bf-a761-d109620d0fd4 req-fb62fec1-8f4e-4e7d-8271-6ffb10e8afe4 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] No waiting events found dispatching network-vif-unplugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1201.167667] env[69994]: WARNING nova.compute.manager [req-2242a146-6ca8-41bf-a761-d109620d0fd4 req-fb62fec1-8f4e-4e7d-8271-6ffb10e8afe4 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received unexpected event network-vif-unplugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d for instance with vm_state shelved and task_state shelving_offloading. 
[ 1201.253897] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.254919] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec2e7e0-2481-4b9d-b7fe-18c3e4d75144 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.263327] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1201.263586] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35fd5d4d-3c72-4341-813c-e0cff7ff6ae5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.330266] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1201.330565] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1201.330791] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleting the datastore file [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1201.331104] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a74abcd-0bd7-4b34-98e4-98ef5468d213 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.338821] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1201.338821] env[69994]: value = "task-3242718" [ 1201.338821] env[69994]: _type = "Task" [ 1201.338821] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.346696] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242718, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.641374] env[69994]: DEBUG nova.compute.utils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1201.643391] env[69994]: DEBUG nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1201.643612] env[69994]: DEBUG nova.network.neutron [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1201.689571] env[69994]: DEBUG nova.policy [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08a2b92b6c0141a6a7e301e064032289', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38d5a89ed7c248c3be506ef12caf5f1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1201.850243] env[69994]: DEBUG oslo_vmware.api [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176164} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.850549] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1201.850752] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1201.850986] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1201.877908] env[69994]: INFO nova.scheduler.client.report [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted allocations for instance 03e58b14-12fe-46e5-b483-4176d5a43c0e [ 1201.975563] env[69994]: DEBUG nova.network.neutron [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Successfully created port: a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1202.146827] env[69994]: DEBUG nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1202.382834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.383090] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.383330] env[69994]: DEBUG nova.objects.instance [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'resources' on Instance uuid 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.886692] env[69994]: DEBUG nova.objects.instance [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'numa_topology' on Instance uuid 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.157703] env[69994]: DEBUG nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1203.187214] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1203.187380] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1203.187573] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1203.188691] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1203.188691] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1203.188691] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1203.188691] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1203.188691] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1203.188963] env[69994]: DEBUG nova.virt.hardware [None 
req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1203.189100] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1203.189280] env[69994]: DEBUG nova.virt.hardware [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1203.191261] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172a9a95-3cd4-4b7c-84c6-dd105af5e48d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.199354] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bd51ba-0e86-4ccb-8212-ef1f08a53067 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.206201] env[69994]: DEBUG nova.compute.manager [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1203.206368] env[69994]: DEBUG nova.compute.manager [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing instance network info cache due to event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1203.206591] env[69994]: DEBUG oslo_concurrency.lockutils [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] Acquiring lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.206745] env[69994]: DEBUG oslo_concurrency.lockutils [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] Acquired lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.206911] env[69994]: DEBUG nova.network.neutron [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.388510] env[69994]: DEBUG nova.objects.base [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Object Instance<03e58b14-12fe-46e5-b483-4176d5a43c0e> lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1203.481821] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d27f7c9-fc8d-4b20-8579-1a08cb87febf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.490232] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d258282b-d840-416b-9623-311630d9e9e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.521052] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016bf923-083c-42dc-99fb-b641e0727cbc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.530474] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a05e18-2d04-40de-bbdc-d9379bd820c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.544359] env[69994]: DEBUG nova.compute.provider_tree [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1203.555152] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf495b-e877-8c36-99b4-0b78caf71efa/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1203.556512] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c671ed-be2c-41e7-9400-bc1b6cc6e935 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.563792] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf495b-e877-8c36-99b4-0b78caf71efa/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1203.563966] env[69994]: ERROR oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf495b-e877-8c36-99b4-0b78caf71efa/disk-0.vmdk due to incomplete transfer. [ 1203.564365] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2df6a4b4-f742-4e9b-8b16-404b7f9b4702 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.572390] env[69994]: DEBUG oslo_vmware.rw_handles [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52bf495b-e877-8c36-99b4-0b78caf71efa/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1203.572610] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Uploaded image 4ad28a5f-4da1-4989-b13a-f34046f115ea to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1203.574415] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1203.574875] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5a6a241a-2f62-48ee-a50e-cac3b36718b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.580214] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1203.580214] env[69994]: value = "task-3242719" [ 1203.580214] env[69994]: _type = "Task" [ 1203.580214] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.589113] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242719, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.656743] env[69994]: DEBUG nova.network.neutron [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Successfully updated port: a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1203.914053] env[69994]: DEBUG nova.network.neutron [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updated VIF entry in instance network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1203.914053] env[69994]: DEBUG nova.network.neutron [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapfc354355-eb", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.048772] env[69994]: DEBUG nova.scheduler.client.report [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.089834] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242719, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.158621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.158910] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.158998] env[69994]: DEBUG nova.network.neutron [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1204.417989] env[69994]: DEBUG oslo_concurrency.lockutils [req-555e358e-e8ba-488a-85f5-86dad8289550 req-09662618-bab8-4198-9b67-d131ea226ea7 service nova] Releasing lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.553905] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.592132] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242719, 'name': Destroy_Task} progress is 33%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.619673] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.668216] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.698369] env[69994]: DEBUG nova.network.neutron [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1204.837056] env[69994]: DEBUG nova.network.neutron [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.062257] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6c6d38e8-a16b-4ff5-b4fb-5250ee4ecdc1 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.155s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.063504] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.613s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.063717] env[69994]: INFO nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] During sync_power_state the instance has a pending task (shelving_image_uploading). Skip. 
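
The "Updating instance_info_cache with network_info: [...]" entries above dump nova's network_info model, a list with one dict per VIF. The sketch below simply reads the commonly needed fields out of one such entry; the field names are taken from the cache dump above and the helper name is illustrative only.

    # Sketch only: extracting fields from one VIF entry of the network_info list
    # printed in the "Updating instance_info_cache" lines above.
    def summarize_vif(vif):
        subnet = vif['network']['subnets'][0]
        fixed = subnet['ips'][0]
        return {
            'port_id': vif['id'],
            'mac': vif['address'],
            'devname': vif.get('devname'),
            'fixed_ip': fixed['address'],
            'floating_ips': [f['address'] for f in fixed.get('floating_ips', [])],
            'segmentation_id': vif.get('details', {}).get('segmentation_id'),
        }

    # For the entry cached above for instance eea243fb-97fc-4c65-8699-1b3c321bd250
    # this yields: port a8446ab9-60ee-4fc2-8b75-e53b3b39a38f, mac fa:16:3e:3d:0c:3b,
    # devname tapa8446ab9-60, fixed IP 192.168.128.10, no floating IPs,
    # segmentation_id 820.
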
[ 1205.063926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.064179] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.445s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.064344] env[69994]: INFO nova.compute.manager [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Unshelving [ 1205.091765] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242719, 'name': Destroy_Task, 'duration_secs': 1.167996} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.091985] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Destroyed the VM [ 1205.092226] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1205.092457] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f3f3586f-7166-43c0-a8df-4f9b9d39f94f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.098008] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1205.098008] env[69994]: value = "task-3242720" [ 1205.098008] env[69994]: _type = "Task" [ 1205.098008] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.105574] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242720, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.146060] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.146285] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.146429] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1205.146899] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.230865] env[69994]: DEBUG nova.compute.manager [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Received event network-vif-plugged-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1205.231226] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] Acquiring lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.231425] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.231425] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.231609] env[69994]: DEBUG nova.compute.manager [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] No waiting events found dispatching network-vif-plugged-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1205.231777] env[69994]: WARNING nova.compute.manager [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Received unexpected event network-vif-plugged-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f for instance with vm_state building and task_state spawning. 
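
The "Invoking <ManagedObject>.<Method>_Task with opID=..." lines, followed by "Waiting for the task" and "Task: {...} progress is N%", come from oslo.vmware's session layer (invoke_api plus wait_for_task), which the vmwareapi driver uses for operations such as the RemoveSnapshot_Task above. A minimal sketch of that call pattern follows; the vCenter host and credentials are placeholders, and the exact argument list is an assumption rather than a copy of the driver's code.

    # Sketch only (placeholder endpoint and credentials) of the oslo.vmware call
    # pattern behind the "Invoking ..._Task" / "Task: {...} progress is N%" lines.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def remove_snapshot(snapshot_ref):
        # Returns a Task managed-object reference, like task-3242720 in the log.
        task = session.invoke_api(session.vim, 'RemoveSnapshot_Task',
                                  snapshot_ref, removeChildren=False)
        # Polls the task (the "progress is N%" lines) until it completes or raises.
        session.wait_for_task(task)
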
[ 1205.232102] env[69994]: DEBUG nova.compute.manager [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Received event network-changed-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1205.232102] env[69994]: DEBUG nova.compute.manager [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Refreshing instance network info cache due to event network-changed-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1205.232264] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] Acquiring lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.339660] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.340042] env[69994]: DEBUG nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Instance network_info: |[{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1205.340368] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] Acquired lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.340588] env[69994]: DEBUG nova.network.neutron [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Refreshing network info cache for 
port a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1205.341975] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:0c:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8446ab9-60ee-4fc2-8b75-e53b3b39a38f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1205.350797] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1205.353557] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1205.354033] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13c5cc66-c968-4b22-bcbd-15c326d697cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.374311] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1205.374311] env[69994]: value = "task-3242721" [ 1205.374311] env[69994]: _type = "Task" [ 1205.374311] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.384371] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242721, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.564274] env[69994]: DEBUG nova.network.neutron [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updated VIF entry in instance network info cache for port a8446ab9-60ee-4fc2-8b75-e53b3b39a38f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.564274] env[69994]: DEBUG nova.network.neutron [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.608826] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242720, 'name': RemoveSnapshot_Task, 'duration_secs': 0.335305} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.609106] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1205.609370] env[69994]: DEBUG nova.compute.manager [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1205.610128] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bfe286-14e8-41cf-893f-7689a6f7cbdd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.650274] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.650535] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.650725] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.650878] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1205.651922] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f5aa7d-64ae-4e83-9ae1-778e4a500305 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.659662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56bfbfc-20d6-4b7f-9794-b23dcabcb3d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.673291] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c71505-ba59-4621-b55b-0581bde95653 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.679383] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db5170c-4d64-4ec7-b51c-194899fcbe24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1205.708333] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179676MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1205.708496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.708702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.883700] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242721, 'name': CreateVM_Task, 'duration_secs': 0.331788} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.883860] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1205.884697] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.884697] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.885018] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1205.885261] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-918600a7-38f4-4bec-b388-4a3170201d26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.890154] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1205.890154] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]529f784d-a5f0-1467-baa3-108e93808b0f" [ 1205.890154] env[69994]: _type = "Task" [ 1205.890154] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.899339] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529f784d-a5f0-1467-baa3-108e93808b0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.066998] env[69994]: DEBUG oslo_concurrency.lockutils [req-8a45f987-a1fc-4fc7-a92b-365941b48c92 req-0cbbe824-f4e0-4483-8261-b1747f7cdd71 service nova] Releasing lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.072647] env[69994]: DEBUG nova.compute.utils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1206.121440] env[69994]: INFO nova.compute.manager [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Shelve offloading [ 1206.400663] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]529f784d-a5f0-1467-baa3-108e93808b0f, 'name': SearchDatastore_Task, 'duration_secs': 0.009088} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.400997] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.401210] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1206.401445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.401590] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.401769] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1206.402038] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9aedfbc0-ff19-4027-a815-43629f771a45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.409974] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1206.410167] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1206.410836] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b673906-0832-4f4c-a1f3-6048b9bb283f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.415650] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1206.415650] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521612a5-57a8-3f0d-a7fb-686e3a7f570b" [ 1206.415650] env[69994]: _type = "Task" [ 1206.415650] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.423124] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521612a5-57a8-3f0d-a7fb-686e3a7f570b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.576196] env[69994]: INFO nova.virt.block_device [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Booting with volume 7f73fdd9-c95c-42cc-91c9-d54e30fd2995 at /dev/sdb [ 1206.610875] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91989d52-46a9-477e-8f96-aeb7edbdff6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.620451] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49eb3da5-83e6-4917-b0b2-15b2ae22ed2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.630969] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.631215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53251ad1-d616-4ef0-98ca-049a4f6de9d1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.637693] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1206.637693] env[69994]: value = "task-3242722" [ 1206.637693] env[69994]: _type = "Task" [ 1206.637693] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.651202] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4738e9fe-0689-4842-bc44-76be87185645 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.652768] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.659149] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dda2e6f-cfd5-4102-8d19-9bb56ae4ad5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.686744] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109c9c50-412d-4250-a6f0-8fbea641e88d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.693171] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6f21be-50c3-4064-bacc-8f1edabee05a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.707261] env[69994]: DEBUG nova.virt.block_device [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating existing volume attachment record: 8aa6cb0c-bab9-4078-8e9b-c99ffb3ee5e4 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1206.733762] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.733920] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance be421d40-9859-4e0d-aef8-a2feb8717a78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.734060] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance d31f167f-8248-4aef-aa3c-6bc7259e1a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.734186] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 70fcf5b1-213f-4ff9-b675-282e7aa30e20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.734322] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eea243fb-97fc-4c65-8699-1b3c321bd250 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.925418] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521612a5-57a8-3f0d-a7fb-686e3a7f570b, 'name': SearchDatastore_Task, 'duration_secs': 0.009096} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.926270] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c217825-5327-41b8-8ca8-a495ad6335af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.930997] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1206.930997] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527a5629-cb65-bb6a-f360-741457f4c257" [ 1206.930997] env[69994]: _type = "Task" [ 1206.930997] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.938462] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527a5629-cb65-bb6a-f360-741457f4c257, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.148527] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1207.148722] env[69994]: DEBUG nova.compute.manager [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1207.149431] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440d7bd4-9f9a-4017-8e95-50365fc2f8b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.154949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.155144] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.155326] env[69994]: DEBUG nova.network.neutron [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1207.237149] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 03e58b14-12fe-46e5-b483-4176d5a43c0e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1207.237381] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1207.237530] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1207.316349] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f10cb3-bf37-4865-99d5-23149d412719 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.324258] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb21b22f-482c-416c-9572-2544c5890a95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.353654] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0576260e-78ba-4c7a-b0b2-eeb6e867d710 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.360957] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2287847-7162-4c07-a9fb-d859f8d432e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.374962] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.441174] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527a5629-cb65-bb6a-f360-741457f4c257, 'name': SearchDatastore_Task, 'duration_secs': 0.008785} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.441482] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.441672] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1207.441920] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffe8cbd7-7415-4b6f-a339-0df1a3decbe2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.449279] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1207.449279] env[69994]: value = "task-3242726" [ 1207.449279] env[69994]: _type = "Task" [ 1207.449279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.457405] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242726, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.877807] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.895650] env[69994]: DEBUG nova.network.neutron [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e330706-32", "ovs_interfaceid": "9e330706-3213-4a99-b48a-d2e09db34453", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.959034] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439018} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.959367] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1207.959593] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1207.959854] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed902e61-a416-4a0c-8d6c-dce532f794d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.965397] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1207.965397] env[69994]: value = "task-3242727" [ 1207.965397] env[69994]: _type = "Task" [ 1207.965397] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.972863] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.383177] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1208.383370] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.675s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.398109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.475720] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056275} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.478048] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1208.478832] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44aa14b7-f2cd-4a48-b768-b62a1f269464 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.501083] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1208.502165] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-672193a6-40c7-4f9a-961c-e4c0d5cab65e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.520292] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1208.520292] env[69994]: value = "task-3242728" [ 1208.520292] env[69994]: _type = "Task" [ 1208.520292] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.527984] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242728, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.637510] env[69994]: DEBUG nova.compute.manager [req-46719888-2092-4bac-a2b4-940daac25378 req-9e60881b-c92e-4e2e-b18c-28adf4ca636d service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-vif-unplugged-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1208.637702] env[69994]: DEBUG oslo_concurrency.lockutils [req-46719888-2092-4bac-a2b4-940daac25378 req-9e60881b-c92e-4e2e-b18c-28adf4ca636d service nova] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.637910] env[69994]: DEBUG oslo_concurrency.lockutils [req-46719888-2092-4bac-a2b4-940daac25378 req-9e60881b-c92e-4e2e-b18c-28adf4ca636d service nova] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.638091] env[69994]: DEBUG oslo_concurrency.lockutils [req-46719888-2092-4bac-a2b4-940daac25378 req-9e60881b-c92e-4e2e-b18c-28adf4ca636d service nova] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.638265] env[69994]: DEBUG nova.compute.manager [req-46719888-2092-4bac-a2b4-940daac25378 req-9e60881b-c92e-4e2e-b18c-28adf4ca636d service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] No waiting events found dispatching network-vif-unplugged-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1208.638434] env[69994]: WARNING nova.compute.manager [req-46719888-2092-4bac-a2b4-940daac25378 req-9e60881b-c92e-4e2e-b18c-28adf4ca636d service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received unexpected event network-vif-unplugged-9e330706-3213-4a99-b48a-d2e09db34453 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1208.728904] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1208.729833] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4886c4e2-27fe-4762-9f63-8c1d5cbb20e4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.737780] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1208.738017] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1044cf29-b2a3-4f46-8414-3fa4849b20eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.820773] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1208.821076] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1208.821283] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleting the datastore file [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1208.821595] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d3669c6-e2f1-4455-b4a1-f7bdbe6df165 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.827924] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1208.827924] env[69994]: value = "task-3242730" [ 1208.827924] env[69994]: _type = "Task" [ 1208.827924] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.837365] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242730, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.030384] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242728, 'name': ReconfigVM_Task, 'duration_secs': 0.269744} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.030667] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfigured VM instance instance-0000006c to attach disk [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1209.031307] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36ee4550-aa5a-4656-8898-b4afe1f42b88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.037302] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1209.037302] env[69994]: value = "task-3242732" [ 1209.037302] env[69994]: _type = "Task" [ 1209.037302] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.044723] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242732, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.337965] env[69994]: DEBUG oslo_vmware.api [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135188} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.338235] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1209.338410] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1209.338584] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1209.360447] env[69994]: INFO nova.scheduler.client.report [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted allocations for instance d31f167f-8248-4aef-aa3c-6bc7259e1a80 [ 1209.383701] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.383913] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.384090] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.384241] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.548422] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242732, 'name': Rename_Task, 'duration_secs': 0.130291} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.549298] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1209.549298] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b269c743-b42f-4a20-ba24-cae032cae65e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.555146] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1209.555146] env[69994]: value = "task-3242733" [ 1209.555146] env[69994]: _type = "Task" [ 1209.555146] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.562861] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242733, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.865460] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.865771] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.866109] env[69994]: DEBUG nova.objects.instance [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'resources' on Instance uuid d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.065797] env[69994]: DEBUG oslo_vmware.api [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242733, 'name': PowerOnVM_Task, 'duration_secs': 0.455544} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.066120] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1210.066329] env[69994]: INFO nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Took 6.91 seconds to spawn the instance on the hypervisor. [ 1210.066502] env[69994]: DEBUG nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1210.067275] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ce3d69-b07d-45db-9aa4-ceab127ac753 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.368513] env[69994]: DEBUG nova.objects.instance [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'numa_topology' on Instance uuid d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1210.585667] env[69994]: INFO nova.compute.manager [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Took 11.63 seconds to build instance. [ 1210.663897] env[69994]: DEBUG nova.compute.manager [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-changed-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1210.664108] env[69994]: DEBUG nova.compute.manager [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing instance network info cache due to event network-changed-9e330706-3213-4a99-b48a-d2e09db34453. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1210.664326] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] Acquiring lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.664470] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] Acquired lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.664627] env[69994]: DEBUG nova.network.neutron [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1210.871643] env[69994]: DEBUG nova.objects.base [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1210.903599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.964868] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657800bc-e583-4967-b9dd-63a198bfe173 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.973262] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0cbcc3-c67e-4a9c-9d0f-60c99b5534ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.003794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec199ce-cbab-4da5-91b6-583d8829a1fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.012266] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a03548f-f71a-4ee2-bda8-1558a7972815 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.026239] env[69994]: DEBUG nova.compute.provider_tree [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.088723] env[69994]: DEBUG oslo_concurrency.lockutils [None req-31b1c253-87e7-4176-8ffa-96891aee1d71 tempest-ServerActionsTestJSON-701397409 
tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.142s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.369771] env[69994]: DEBUG nova.compute.manager [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Received event network-changed-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1211.370039] env[69994]: DEBUG nova.compute.manager [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Refreshing instance network info cache due to event network-changed-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1211.370248] env[69994]: DEBUG oslo_concurrency.lockutils [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] Acquiring lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.370323] env[69994]: DEBUG oslo_concurrency.lockutils [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] Acquired lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.370476] env[69994]: DEBUG nova.network.neutron [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Refreshing network info cache for port a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1211.372211] env[69994]: DEBUG nova.network.neutron [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updated VIF entry in instance network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1211.372547] env[69994]: DEBUG nova.network.neutron [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap9e330706-32", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.529275] env[69994]: DEBUG nova.scheduler.client.report [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1211.875499] env[69994]: DEBUG oslo_concurrency.lockutils [req-bb754767-3b2c-4c18-85a2-207be164536d req-7d851638-2751-4f0b-8676-b072ccb3b209 service nova] Releasing lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.034337] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.168s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.111842] env[69994]: DEBUG nova.network.neutron [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updated VIF entry in instance network info cache for port a8446ab9-60ee-4fc2-8b75-e53b3b39a38f. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.112231] env[69994]: DEBUG nova.network.neutron [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.302298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.302583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.302842] env[69994]: DEBUG nova.objects.instance [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'pci_requests' on Instance uuid 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.543440] env[69994]: DEBUG oslo_concurrency.lockutils [None req-69e49b71-8473-4457-a1ea-ca7adf943435 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.287s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.544432] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.094s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.544580] env[69994]: INFO nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] During sync_power_state the instance has a pending task (shelving_image_uploading). Skip. [ 1212.544674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.545204] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.642s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.545391] env[69994]: INFO nova.compute.manager [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Unshelving [ 1212.615807] env[69994]: DEBUG oslo_concurrency.lockutils [req-d56819ba-2560-4700-9ece-5fd7c8eff50b req-d2c2af5a-e527-427d-bfef-66db45f8382f service nova] Releasing lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.806967] env[69994]: DEBUG nova.objects.instance [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'numa_topology' on Instance uuid 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.309922] env[69994]: INFO nova.compute.claims [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1213.566567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.396077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9917431-5dcb-4190-b4d9-b999e61e619c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.404119] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb007bc-37d3-4059-aa70-105347724b27 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.435405] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8ce5a2-5ae5-4570-92ed-352632d07bd8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.442550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3331decd-207e-49d8-a38d-a49280573735 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.455468] env[69994]: DEBUG nova.compute.provider_tree [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.958806] env[69994]: DEBUG nova.scheduler.client.report [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.463990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.161s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.466286] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.900s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.466505] env[69994]: DEBUG nova.objects.instance [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'pci_requests' on Instance uuid d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1215.493195] env[69994]: INFO nova.network.neutron [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating port fc354355-eb77-47cd-9f5b-89c8e6616b1d with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1215.972541] env[69994]: DEBUG nova.objects.instance [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 
tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'numa_topology' on Instance uuid d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1216.474759] env[69994]: INFO nova.compute.claims [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1216.922355] env[69994]: DEBUG nova.compute.manager [req-e104c7a5-9824-43c1-95d3-34cc6d0ca2ed req-073598a5-ad24-4579-bff4-d24372759be3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-vif-plugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1216.922578] env[69994]: DEBUG oslo_concurrency.lockutils [req-e104c7a5-9824-43c1-95d3-34cc6d0ca2ed req-073598a5-ad24-4579-bff4-d24372759be3 service nova] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.922805] env[69994]: DEBUG oslo_concurrency.lockutils [req-e104c7a5-9824-43c1-95d3-34cc6d0ca2ed req-073598a5-ad24-4579-bff4-d24372759be3 service nova] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.922969] env[69994]: DEBUG oslo_concurrency.lockutils [req-e104c7a5-9824-43c1-95d3-34cc6d0ca2ed req-073598a5-ad24-4579-bff4-d24372759be3 service nova] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.923173] env[69994]: DEBUG nova.compute.manager [req-e104c7a5-9824-43c1-95d3-34cc6d0ca2ed req-073598a5-ad24-4579-bff4-d24372759be3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] No waiting events found dispatching network-vif-plugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1216.923298] env[69994]: WARNING nova.compute.manager [req-e104c7a5-9824-43c1-95d3-34cc6d0ca2ed req-073598a5-ad24-4579-bff4-d24372759be3 service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received unexpected event network-vif-plugged-fc354355-eb77-47cd-9f5b-89c8e6616b1d for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1216.973461] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.973597] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1216.973833] env[69994]: DEBUG nova.network.neutron [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1217.572274] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7cb5ae-4df9-4a3b-bf8d-5287f63c419f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.579844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90eb72a1-de11-4bc5-9915-58bfb0770ea1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.614042] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55a83bf-5081-44c6-a752-6e59664ba6c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.621464] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a565643-ee29-4b90-8a6c-785f37111589 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.634718] env[69994]: DEBUG nova.compute.provider_tree [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.735955] env[69994]: DEBUG nova.network.neutron [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc354355-eb", "ovs_interfaceid": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.139579] env[69994]: DEBUG nova.scheduler.client.report [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1218.239115] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.267362] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e9a301b0e20da299c8d308c8d1fa5377',container_format='bare',created_at=2025-04-03T08:48:40Z,direct_url=,disk_format='vmdk',id=c19374ad-1dca-4b03-a7af-507e74541e09,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-955705093-shelved',owner='352ad5b68db1480eb657935e006d7dbb',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-04-03T08:48:55Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1218.267626] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1218.267804] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image limits 0:0:0 {{(pid=69994) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1218.267989] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1218.268151] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1218.268297] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1218.268500] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1218.268662] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1218.268831] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1218.268996] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1218.269188] env[69994]: DEBUG nova.virt.hardware [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1218.270111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3f1584-0891-45bf-a83c-e86cd7b37091 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.277986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72792d11-fab5-48e7-af82-bf3cced7e536 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.291542] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 
tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:d6:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc354355-eb77-47cd-9f5b-89c8e6616b1d', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1218.298952] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1218.299184] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1218.299377] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da48e744-65af-4d38-9dd0-6b3598aaf89b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.317543] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1218.317543] env[69994]: value = "task-3242734" [ 1218.317543] env[69994]: _type = "Task" [ 1218.317543] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.324591] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242734, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.644915] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.178s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.673740] env[69994]: INFO nova.network.neutron [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating port 9e330706-3213-4a99-b48a-d2e09db34453 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1218.827429] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242734, 'name': CreateVM_Task, 'duration_secs': 0.34927} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.827519] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1218.834935] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.835150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.835546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1218.835802] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb762718-d7c3-42f9-8d86-adde47657758 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.840645] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1218.840645] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5212de72-567f-1f64-48c9-de2ba776db9e" [ 1218.840645] env[69994]: _type = "Task" [ 1218.840645] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.848732] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5212de72-567f-1f64-48c9-de2ba776db9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.948565] env[69994]: DEBUG nova.compute.manager [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1218.948799] env[69994]: DEBUG nova.compute.manager [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing instance network info cache due to event network-changed-fc354355-eb77-47cd-9f5b-89c8e6616b1d. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1218.949054] env[69994]: DEBUG oslo_concurrency.lockutils [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] Acquiring lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.949209] env[69994]: DEBUG oslo_concurrency.lockutils [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] Acquired lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.949370] env[69994]: DEBUG nova.network.neutron [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Refreshing network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1219.351463] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.351711] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Processing image c19374ad-1dca-4b03-a7af-507e74541e09 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1219.351953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09/c19374ad-1dca-4b03-a7af-507e74541e09.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.352115] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09/c19374ad-1dca-4b03-a7af-507e74541e09.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.352292] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.352535] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56e7b49e-8b20-48d7-ab2f-e57a16506e0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.360617] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1219.360788] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1219.361450] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc8c399b-6502-4a8a-8c9d-251a0f467f99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.366082] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1219.366082] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]527eecca-f669-29e8-6f41-80d394edce12" [ 1219.366082] env[69994]: _type = "Task" [ 1219.366082] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.373021] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]527eecca-f669-29e8-6f41-80d394edce12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.636279] env[69994]: DEBUG nova.network.neutron [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updated VIF entry in instance network info cache for port fc354355-eb77-47cd-9f5b-89c8e6616b1d. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1219.636626] env[69994]: DEBUG nova.network.neutron [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [{"id": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "address": "fa:16:3e:d3:d6:b9", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc354355-eb", "ovs_interfaceid": "fc354355-eb77-47cd-9f5b-89c8e6616b1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.876637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1219.876914] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Fetch image to [datastore1] OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d/OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1219.877112] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Downloading stream optimized image c19374ad-1dca-4b03-a7af-507e74541e09 to [datastore1] OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d/OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d.vmdk on the data store datastore1 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1219.877284] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Downloading image file data c19374ad-1dca-4b03-a7af-507e74541e09 to the ESX as VM named 'OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d' {{(pid=69994) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1219.944359] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1219.944359] env[69994]: value = "resgroup-9" [ 1219.944359] env[69994]: _type = "ResourcePool" [ 1219.944359] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1219.944642] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a96140d2-0f00-4a36-9217-cbfa8177d8ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.965085] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease: (returnval){ [ 1219.965085] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dd208f-92b7-fbbf-01e0-83356e145e18" [ 1219.965085] env[69994]: _type = "HttpNfcLease" [ 1219.965085] env[69994]: } obtained for vApp import into resource pool (val){ [ 1219.965085] env[69994]: value = "resgroup-9" [ 1219.965085] env[69994]: _type = "ResourcePool" [ 1219.965085] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1219.965349] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the lease: (returnval){ [ 1219.965349] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dd208f-92b7-fbbf-01e0-83356e145e18" [ 1219.965349] env[69994]: _type = "HttpNfcLease" [ 1219.965349] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1219.971440] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1219.971440] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dd208f-92b7-fbbf-01e0-83356e145e18" [ 1219.971440] env[69994]: _type = "HttpNfcLease" [ 1219.971440] env[69994]: } is initializing. 
{{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1220.126275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.126275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.126465] env[69994]: DEBUG nova.network.neutron [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1220.139248] env[69994]: DEBUG oslo_concurrency.lockutils [req-75c03bcd-6006-4fb4-a827-f4f0c6dff37f req-45cef26d-e4ec-41bc-af84-f9ef7ce428ac service nova] Releasing lock "refresh_cache-03e58b14-12fe-46e5-b483-4176d5a43c0e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.474226] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1220.474226] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dd208f-92b7-fbbf-01e0-83356e145e18" [ 1220.474226] env[69994]: _type = "HttpNfcLease" [ 1220.474226] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1220.474520] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1220.474520] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dd208f-92b7-fbbf-01e0-83356e145e18" [ 1220.474520] env[69994]: _type = "HttpNfcLease" [ 1220.474520] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1220.475298] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd739f0-7fb8-4ea7-aa2d-10f610125424 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.482412] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f7d46-d308-7529-ec81-da6d0307d1b9/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1220.482583] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f7d46-d308-7529-ec81-da6d0307d1b9/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1220.548030] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e5ec558e-73f8-42f8-9954-0c04a3d4481e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.838264] env[69994]: DEBUG nova.network.neutron [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e330706-32", "ovs_interfaceid": "9e330706-3213-4a99-b48a-d2e09db34453", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.977144] env[69994]: DEBUG nova.compute.manager [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-vif-plugged-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1220.977438] env[69994]: DEBUG oslo_concurrency.lockutils [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.977928] env[69994]: DEBUG oslo_concurrency.lockutils [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.977928] env[69994]: DEBUG oslo_concurrency.lockutils [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.978097] env[69994]: DEBUG nova.compute.manager [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] No waiting events found dispatching network-vif-plugged-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1220.978152] env[69994]: WARNING nova.compute.manager [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received unexpected event network-vif-plugged-9e330706-3213-4a99-b48a-d2e09db34453 for instance with vm_state shelved_offloaded and task_state spawning. [ 1220.978328] env[69994]: DEBUG nova.compute.manager [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-changed-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1220.978489] env[69994]: DEBUG nova.compute.manager [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing instance network info cache due to event network-changed-9e330706-3213-4a99-b48a-d2e09db34453. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1220.978655] env[69994]: DEBUG oslo_concurrency.lockutils [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] Acquiring lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.343419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.346198] env[69994]: DEBUG oslo_concurrency.lockutils [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] Acquired lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.346415] env[69994]: DEBUG nova.network.neutron [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Refreshing network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1221.369378] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='3f239e010e580c4139f0cc5a6294119c',container_format='bare',created_at=2025-04-03T08:48:45Z,direct_url=,disk_format='vmdk',id=4ad28a5f-4da1-4989-b13a-f34046f115ea,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-2143989948-shelved',owner='605d72502cc644bfa4d875bf348246de',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-04-03T08:49:01Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1221.369650] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1221.369809] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1221.369993] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1221.370158] env[69994]: DEBUG 
nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1221.370312] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1221.370512] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1221.370688] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1221.370864] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1221.371035] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1221.371214] env[69994]: DEBUG nova.virt.hardware [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1221.372127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2de33ed-5640-4518-8926-5ac63fec1f50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.382962] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23edca9e-825f-4bad-bc2b-6a394bbbbc30 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.400512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:d4:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52e117d3-d120-42c6-8e72-70085845acbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e330706-3213-4a99-b48a-d2e09db34453', 'vif_model': 'vmxnet3'}] 
{{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1221.407983] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1221.412564] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1221.412822] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ba055cb-338a-4a62-b966-6d5c66c86ddc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.434897] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1221.434897] env[69994]: value = "task-3242736" [ 1221.434897] env[69994]: _type = "Task" [ 1221.434897] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.442167] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242736, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.619016] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1221.619294] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f7d46-d308-7529-ec81-da6d0307d1b9/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1221.620524] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cefd361-6c13-4287-8195-b75ac3edd558 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.627356] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f7d46-d308-7529-ec81-da6d0307d1b9/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1221.627548] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f7d46-d308-7529-ec81-da6d0307d1b9/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1221.628188] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a26c2168-fb01-4c4b-a43d-3b89b6d34583 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.842148] env[69994]: DEBUG oslo_vmware.rw_handles [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520f7d46-d308-7529-ec81-da6d0307d1b9/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1221.842418] env[69994]: INFO nova.virt.vmwareapi.images [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Downloaded image file data c19374ad-1dca-4b03-a7af-507e74541e09 [ 1221.843506] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925896bd-4323-4091-8e09-cc49d7de56ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.860284] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb9433ac-bfc8-4621-b6cf-d77a63661f4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.882376] env[69994]: INFO nova.virt.vmwareapi.images [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] The imported VM was unregistered [ 1221.884688] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1221.884914] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating directory with path [datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1221.885496] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7d50448-27e7-4b1e-ace1-7c4d668ac0e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.896302] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created directory with path [datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1221.896481] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 
tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d/OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d.vmdk to [datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09/c19374ad-1dca-4b03-a7af-507e74541e09.vmdk. {{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1221.896715] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c33a38de-bb0f-4395-bcb8-e3ab9c3d6f47 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.902963] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1221.902963] env[69994]: value = "task-3242738" [ 1221.902963] env[69994]: _type = "Task" [ 1221.902963] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.910411] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242738, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.944170] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242736, 'name': CreateVM_Task, 'duration_secs': 0.365676} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.944335] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1221.944970] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.945141] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1221.945502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1221.945751] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1ebfa03-0d90-4c44-8dae-d18d5ebf865e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.950071] env[69994]: DEBUG 
oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1221.950071] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5203ec7f-5932-8d55-073e-aebc4d783c55" [ 1221.950071] env[69994]: _type = "Task" [ 1221.950071] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.959797] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5203ec7f-5932-8d55-073e-aebc4d783c55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.063864] env[69994]: DEBUG nova.network.neutron [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updated VIF entry in instance network info cache for port 9e330706-3213-4a99-b48a-d2e09db34453. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1222.064303] env[69994]: DEBUG nova.network.neutron [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [{"id": "9e330706-3213-4a99-b48a-d2e09db34453", "address": "fa:16:3e:fb:d4:c8", "network": {"id": "6d4a143e-4f55-4918-8454-462892aacf0e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1594130263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d72502cc644bfa4d875bf348246de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52e117d3-d120-42c6-8e72-70085845acbf", "external-id": "nsx-vlan-transportzone-934", "segmentation_id": 934, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e330706-32", "ovs_interfaceid": "9e330706-3213-4a99-b48a-d2e09db34453", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.412522] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242738, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.461156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.461377] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Processing image 4ad28a5f-4da1-4989-b13a-f34046f115ea {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1222.461560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea/4ad28a5f-4da1-4989-b13a-f34046f115ea.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.461704] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea/4ad28a5f-4da1-4989-b13a-f34046f115ea.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.461884] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1222.462169] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c987b73-3494-4ff0-b0fa-778a3f74435c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.479130] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1222.479321] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1222.480149] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c6c366a-07ce-4560-830e-9e26308d0a84 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.486071] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1222.486071] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e8bfcc-62b9-f1cc-8b06-18362080837e" [ 1222.486071] env[69994]: _type = "Task" [ 1222.486071] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.494359] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e8bfcc-62b9-f1cc-8b06-18362080837e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.567309] env[69994]: DEBUG oslo_concurrency.lockutils [req-d88c7f58-1e37-4258-b827-5aaa53948194 req-52208d52-2f7e-401d-a700-1577ff931102 service nova] Releasing lock "refresh_cache-d31f167f-8248-4aef-aa3c-6bc7259e1a80" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.888015] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.888324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.913442] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242738, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.997169] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1222.997432] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Fetch image to [datastore1] OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91/OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1222.997615] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Downloading stream optimized image 4ad28a5f-4da1-4989-b13a-f34046f115ea to [datastore1] OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91/OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91.vmdk on the data store datastore1 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1222.997782] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Downloading image file data 4ad28a5f-4da1-4989-b13a-f34046f115ea to the ESX as VM named 'OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1223.078172] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1223.078172] env[69994]: value = "resgroup-9" [ 1223.078172] env[69994]: _type = "ResourcePool" [ 1223.078172] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1223.078571] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-625d01df-3c61-4147-9016-98911e4d94ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.100815] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease: (returnval){ [ 1223.100815] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52949ccf-edb6-71fb-4867-58b712b14d8b" [ 1223.100815] env[69994]: _type = "HttpNfcLease" [ 1223.100815] env[69994]: } obtained for vApp import into resource pool (val){ [ 1223.100815] env[69994]: value = "resgroup-9" [ 1223.100815] env[69994]: _type = "ResourcePool" [ 1223.100815] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1223.101220] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the lease: (returnval){ [ 1223.101220] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52949ccf-edb6-71fb-4867-58b712b14d8b" [ 1223.101220] env[69994]: _type = "HttpNfcLease" [ 1223.101220] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1223.108023] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1223.108023] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52949ccf-edb6-71fb-4867-58b712b14d8b" [ 1223.108023] env[69994]: _type = "HttpNfcLease" [ 1223.108023] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1223.392181] env[69994]: INFO nova.compute.manager [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Detaching volume 11b8e021-7578-46be-b87c-5051c0a1cb4d [ 1223.413505] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242738, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.427101] env[69994]: INFO nova.virt.block_device [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Attempting to driver detach volume 11b8e021-7578-46be-b87c-5051c0a1cb4d from mountpoint /dev/sdb [ 1223.427352] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1223.427539] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1223.428459] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b35078-1b58-457a-8cc5-38668355b3ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.450666] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6049715a-cc69-4489-b79a-a0b3f738f045 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.458065] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3dd653-abe8-4637-a51b-b6485c9dc3b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.479074] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9174cdf1-c20e-4609-8770-e795f8a8787d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.497206] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] The volume has not been displaced from its original location: [datastore2] volume-11b8e021-7578-46be-b87c-5051c0a1cb4d/volume-11b8e021-7578-46be-b87c-5051c0a1cb4d.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1223.502922] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfiguring VM instance instance-00000064 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1223.503324] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f58d37fc-3c06-4f4c-b503-04d186bc877d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.521826] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1223.521826] env[69994]: value = "task-3242740" [ 1223.521826] env[69994]: _type = "Task" [ 1223.521826] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.530659] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242740, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.609510] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1223.609510] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52949ccf-edb6-71fb-4867-58b712b14d8b" [ 1223.609510] env[69994]: _type = "HttpNfcLease" [ 1223.609510] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1223.914390] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242738, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.031612] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242740, 'name': ReconfigVM_Task, 'duration_secs': 0.272349} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.031877] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Reconfigured VM instance instance-00000064 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1224.037130] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-858c4678-0786-4a5b-8264-359b2febbeb9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.054053] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1224.054053] env[69994]: value = "task-3242741" [ 1224.054053] env[69994]: _type = "Task" [ 1224.054053] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.062611] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242741, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.109782] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1224.109782] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52949ccf-edb6-71fb-4867-58b712b14d8b" [ 1224.109782] env[69994]: _type = "HttpNfcLease" [ 1224.109782] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1224.110265] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1224.110265] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52949ccf-edb6-71fb-4867-58b712b14d8b" [ 1224.110265] env[69994]: _type = "HttpNfcLease" [ 1224.110265] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1224.110845] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0891380d-a8af-4ff6-9c1f-fe5fe0718f86 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.118504] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527647c3-1dfd-2dd7-f03b-114381a40683/disk-0.vmdk from lease info. 
{{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1224.118684] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527647c3-1dfd-2dd7-f03b-114381a40683/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1224.182213] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0f0fd08a-ed55-44bb-93c6-4542bfcb15fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.415713] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242738, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.245641} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.415990] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d/OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d.vmdk to [datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09/c19374ad-1dca-4b03-a7af-507e74541e09.vmdk. [ 1224.416193] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Cleaning up location [datastore1] OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1224.416377] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b5717ac7-2d34-446d-b481-70467b02339d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1224.416601] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8077b5c1-32c5-4499-b013-6b5a828ffbf5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.423641] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1224.423641] env[69994]: value = "task-3242742" [ 1224.423641] env[69994]: _type = "Task" [ 1224.423641] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.433783] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242742, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.563686] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242741, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.935842] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0429} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.937247] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1224.937484] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09/c19374ad-1dca-4b03-a7af-507e74541e09.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.937862] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09/c19374ad-1dca-4b03-a7af-507e74541e09.vmdk to [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1224.938215] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ec05947-c96f-4c69-9634-0e625d7707db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.946846] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1224.946846] env[69994]: value = "task-3242743" [ 1224.946846] env[69994]: _type = "Task" [ 1224.946846] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.955817] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.064768] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242741, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.367525] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1225.367868] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527647c3-1dfd-2dd7-f03b-114381a40683/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1225.368690] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752a6be2-fc49-4ca5-b4e4-7db3e9c1da1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.375470] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527647c3-1dfd-2dd7-f03b-114381a40683/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1225.375639] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527647c3-1dfd-2dd7-f03b-114381a40683/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1225.375911] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-11dd7caf-0a23-4c8d-a533-9d2cc519122b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.456994] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242743, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.565764] env[69994]: DEBUG oslo_vmware.api [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242741, 'name': ReconfigVM_Task, 'duration_secs': 1.162381} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.566304] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648027', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'name': 'volume-11b8e021-7578-46be-b87c-5051c0a1cb4d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'be421d40-9859-4e0d-aef8-a2feb8717a78', 'attached_at': '', 'detached_at': '', 'volume_id': '11b8e021-7578-46be-b87c-5051c0a1cb4d', 'serial': '11b8e021-7578-46be-b87c-5051c0a1cb4d'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1225.624488] env[69994]: DEBUG oslo_vmware.rw_handles [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527647c3-1dfd-2dd7-f03b-114381a40683/disk-0.vmdk. {{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1225.624788] env[69994]: INFO nova.virt.vmwareapi.images [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Downloaded image file data 4ad28a5f-4da1-4989-b13a-f34046f115ea [ 1225.625671] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6e51fe-9e9a-4d9b-ae7f-82a93744f96c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.641665] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0a97e27-7974-4209-aed4-16bd18474f73 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.667862] env[69994]: INFO nova.virt.vmwareapi.images [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] The imported VM was unregistered [ 1225.670365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1225.670602] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.670901] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eda8708c-d995-4a90-af81-9de1778dbfe6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.717720] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Created directory with path [datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.717941] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91/OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91.vmdk to [datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea/4ad28a5f-4da1-4989-b13a-f34046f115ea.vmdk. {{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1225.718254] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-56a8b343-350c-42b5-a963-747153eaae45 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.726673] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1225.726673] env[69994]: value = "task-3242745" [ 1225.726673] env[69994]: _type = "Task" [ 1225.726673] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.734724] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.958913] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242743, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.112680] env[69994]: DEBUG nova.objects.instance [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'flavor' on Instance uuid be421d40-9859-4e0d-aef8-a2feb8717a78 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.236862] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.459882] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242743, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.737868] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.962183] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242743, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.120827] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0f3493-4583-4a60-b1de-40a64d4f738f tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.232s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.241332] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.281846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.282218] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.282496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "be421d40-9859-4e0d-aef8-a2feb8717a78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.282740] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.282992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.285857] env[69994]: INFO nova.compute.manager [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Terminating instance [ 1227.463586] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242743, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.427698} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.464128] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c19374ad-1dca-4b03-a7af-507e74541e09/c19374ad-1dca-4b03-a7af-507e74541e09.vmdk to [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1227.465296] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc08233d-13cc-46d3-bd58-6ef0509cbe2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.501746] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1227.502129] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abf2b1f5-6ee0-4170-8db4-886a9d21c530 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.533809] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1227.533809] env[69994]: value = "task-3242746" [ 1227.533809] env[69994]: _type = "Task" [ 1227.533809] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.548597] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242746, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.742326] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.789471] env[69994]: DEBUG nova.compute.manager [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1227.789712] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1227.790712] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f953e6d4-2e9c-4b2b-be9d-e1256a695468 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.801394] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1227.801737] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2077cb9-e688-4eaf-8219-432df5da87a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.808711] env[69994]: DEBUG oslo_vmware.api [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1227.808711] env[69994]: value = "task-3242747" [ 1227.808711] env[69994]: _type = "Task" [ 1227.808711] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.818246] env[69994]: DEBUG oslo_vmware.api [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.044664] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.238679] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.320146] env[69994]: DEBUG oslo_vmware.api [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242747, 'name': PowerOffVM_Task, 'duration_secs': 0.265751} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.320146] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1228.320349] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1228.320522] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-904def18-bce3-4627-a726-42e374f64d58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.399684] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1228.400122] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1228.400495] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleting the datastore file [datastore2] be421d40-9859-4e0d-aef8-a2feb8717a78 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1228.401078] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67099116-8af9-4ae6-8c37-f0668be54e81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.411730] env[69994]: DEBUG oslo_vmware.api [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1228.411730] env[69994]: value = "task-3242749" [ 1228.411730] env[69994]: _type = "Task" [ 1228.411730] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.423675] env[69994]: DEBUG oslo_vmware.api [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242749, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.546166] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.740047] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.923182] env[69994]: DEBUG oslo_vmware.api [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213272} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.923563] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1228.923887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1228.924172] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1228.924413] env[69994]: INFO nova.compute.manager [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1228.924731] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1228.925095] env[69994]: DEBUG nova.compute.manager [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1228.925259] env[69994]: DEBUG nova.network.neutron [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1229.047809] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.240458] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.435197] env[69994]: DEBUG nova.compute.manager [req-355ed945-0835-4e62-91d2-b0405f843d8b req-166343bc-b2a8-476f-82bc-0f81d86a9bd9 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Received event network-vif-deleted-ba2c9555-1cfb-479b-b793-f20615723d77 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1229.435197] env[69994]: INFO nova.compute.manager [req-355ed945-0835-4e62-91d2-b0405f843d8b req-166343bc-b2a8-476f-82bc-0f81d86a9bd9 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Neutron deleted interface ba2c9555-1cfb-479b-b793-f20615723d77; detaching it from the instance and deleting it from the info cache [ 1229.435197] env[69994]: DEBUG nova.network.neutron [req-355ed945-0835-4e62-91d2-b0405f843d8b req-166343bc-b2a8-476f-82bc-0f81d86a9bd9 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.547236] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.740119] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.914052] env[69994]: DEBUG nova.network.neutron [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.937818] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d19ad408-4d79-496f-8c12-22c665ae3e2c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.948885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac206bd-1d19-4826-ae6d-e4c9e617ad25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.979606] env[69994]: DEBUG nova.compute.manager [req-355ed945-0835-4e62-91d2-b0405f843d8b req-166343bc-b2a8-476f-82bc-0f81d86a9bd9 service nova] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Detach interface failed, port_id=ba2c9555-1cfb-479b-b793-f20615723d77, reason: Instance be421d40-9859-4e0d-aef8-a2feb8717a78 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1230.046813] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.241030] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242745, 'name': MoveVirtualDisk_Task, 'duration_secs': 4.477873} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.241346] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91/OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91.vmdk to [datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea/4ad28a5f-4da1-4989-b13a-f34046f115ea.vmdk. 
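The disk-move that completes in the record above follows oslo.vmware's generic invoke-and-poll pattern: the driver starts a vCenter task through the API session and then wait_for_task polls it, which is what produces the repeated "progress is N%" / _poll_task records throughout this log. The sketch below is a minimal illustration of that pattern, not Nova's actual code path: the vCenter endpoint, credentials, retry/poll settings, and datastore paths are placeholders, and a real driver resolves the Datacenter owning the datastore instead of taking the first one.

    from oslo_vmware import api, vim_util

    # Placeholder vCenter endpoint and credentials (not values from this log).
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Illustrative shortcut: take the first Datacenter managed object reference.
    dc_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'Datacenter', 1).objects[0].obj

    disk_mgr = session.vim.service_content.virtualDiskManager

    # Start the server-side move, then block while wait_for_task polls the
    # returned task object until vCenter reports it completed successfully.
    task = session.invoke_api(
        session.vim, 'MoveVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] OSTACK_IMG_example/OSTACK_IMG_example.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] devstack-image-cache_base/example/example.vmdk',
        destDatacenter=dc_ref)
    session.wait_for_task(task)

The same session.invoke_api(...) plus session.wait_for_task(...) shape underlies the other task records in this section (CopyVirtualDisk_Task, ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task), only the invoked method and managed object differ.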
[ 1230.241539] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Cleaning up location [datastore1] OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1230.241702] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_47aa940b-36a1-4c83-8f00-fca2714d2e91 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1230.241982] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c7a2495-fe61-4ecd-a8cf-530a92ea1217 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.247596] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1230.247596] env[69994]: value = "task-3242750" [ 1230.247596] env[69994]: _type = "Task" [ 1230.247596] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.255164] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.417082] env[69994]: INFO nova.compute.manager [-] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Took 1.49 seconds to deallocate network for instance. [ 1230.547093] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242746, 'name': ReconfigVM_Task, 'duration_secs': 2.643878} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.547373] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e/03e58b14-12fe-46e5-b483-4176d5a43c0e.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1230.548501] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'guest_format': None, 'device_name': '/dev/sda', 'encryption_format': None, 'size': 0, 'device_type': 'disk', 'encryption_options': None, 'encrypted': False, 'image_id': 'cc2e14cc-b12f-480a-a387-dd21e9efda8b'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '8aa6cb0c-bab9-4078-8e9b-c99ffb3ee5e4', 'disk_bus': None, 'guest_format': None, 'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648040', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'name': 'volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '03e58b14-12fe-46e5-b483-4176d5a43c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'serial': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995'}, 'mount_device': '/dev/sdb', 'boot_index': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1230.548702] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1230.548887] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648040', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'name': 'volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '03e58b14-12fe-46e5-b483-4176d5a43c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'serial': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1230.549637] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63310409-e1eb-4da1-be8d-32891a4138c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.564770] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b83e381-d098-4815-bc0f-844baae46ad0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.588662] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995/volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.588891] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4eea465c-dd13-42cd-8e20-25cc48011b54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.605974] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1230.605974] env[69994]: value = "task-3242751" [ 1230.605974] env[69994]: _type = "Task" [ 1230.605974] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.615566] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242751, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.757660] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.050655} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.757886] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1230.758094] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea/4ad28a5f-4da1-4989-b13a-f34046f115ea.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.758366] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea/4ad28a5f-4da1-4989-b13a-f34046f115ea.vmdk to [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1230.758610] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e888104-c491-401e-aa7b-7e5b8e988e52 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.765095] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1230.765095] env[69994]: value = "task-3242752" [ 1230.765095] env[69994]: _type = "Task" [ 1230.765095] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.774610] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242752, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.923382] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.923695] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.923926] env[69994]: DEBUG nova.objects.instance [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'resources' on Instance uuid be421d40-9859-4e0d-aef8-a2feb8717a78 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.116577] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242751, 'name': ReconfigVM_Task, 'duration_secs': 0.326511} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.116944] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfigured VM instance instance-00000067 to attach disk [datastore2] volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995/volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.121941] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ee0201f-df45-4bfd-9dfe-94146e3f4200 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.137779] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1231.137779] env[69994]: value = "task-3242753" [ 1231.137779] env[69994]: _type = "Task" [ 1231.137779] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.146598] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242753, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.275190] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242752, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.532818] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea80493-cc00-4c7f-a8e0-839001cf893a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.540433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfe41a8-345b-4c18-ab9a-b8b6446a4a89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.571275] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911afc37-351e-4659-a71f-ba3920cc2242 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.578890] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a996c2a-fd2a-4a8e-8a18-a3ecb640d9de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.592350] env[69994]: DEBUG nova.compute.provider_tree [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.647493] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242753, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.777398] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242752, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.095590] env[69994]: DEBUG nova.scheduler.client.report [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1232.148885] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242753, 'name': ReconfigVM_Task, 'duration_secs': 0.911604} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.149241] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648040', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'name': 'volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '03e58b14-12fe-46e5-b483-4176d5a43c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'serial': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1232.149841] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ca22032-3fe9-46cb-9798-e7b57ab3cc72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.156666] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1232.156666] env[69994]: value = "task-3242754" [ 1232.156666] env[69994]: _type = "Task" [ 1232.156666] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.165505] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242754, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.276152] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242752, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.601284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.623835] env[69994]: INFO nova.scheduler.client.report [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted allocations for instance be421d40-9859-4e0d-aef8-a2feb8717a78 [ 1232.667619] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242754, 'name': Rename_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.776801] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242752, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.131427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f6c1365e-59f9-464f-8bd4-ff655fdc87d8 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "be421d40-9859-4e0d-aef8-a2feb8717a78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.849s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.167062] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242754, 'name': Rename_Task, 'duration_secs': 0.855642} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.167333] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1233.167584] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7640998a-921e-446a-b365-9d7bc901dbbe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.173467] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1233.173467] env[69994]: value = "task-3242755" [ 1233.173467] env[69994]: _type = "Task" [ 1233.173467] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.181034] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242755, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.277152] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242752, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.197597} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.277405] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4ad28a5f-4da1-4989-b13a-f34046f115ea/4ad28a5f-4da1-4989-b13a-f34046f115ea.vmdk to [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1233.278176] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf9c28d-7f2e-4928-88b5-dd2e30e06a87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.300053] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1233.300294] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b185892-9704-4c22-8dd0-bcaba241cb8b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.318717] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1233.318717] env[69994]: value = "task-3242756" [ 1233.318717] env[69994]: _type = "Task" [ 1233.318717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.326046] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242756, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.685249] env[69994]: DEBUG oslo_vmware.api [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242755, 'name': PowerOnVM_Task, 'duration_secs': 0.474873} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.685527] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1233.784368] env[69994]: DEBUG nova.compute.manager [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.785342] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468c680f-22bd-4124-aaf9-168fc874da36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.833728] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242756, 'name': ReconfigVM_Task, 'duration_secs': 0.329155} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.834036] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Reconfigured VM instance instance-00000068 to attach disk [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80/d31f167f-8248-4aef-aa3c-6bc7259e1a80.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1233.835341] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-069116f3-941f-4282-ad5a-974842ec7cf2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.841887] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1233.841887] env[69994]: value = "task-3242757" [ 1233.841887] env[69994]: _type = "Task" [ 1233.841887] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.850757] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242757, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.304964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-985e13df-c9b7-4062-8efb-42b9681daf85 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 29.241s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.354034] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242757, 'name': Rename_Task, 'duration_secs': 0.148268} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.354034] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1234.354230] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9581d969-57dc-48f6-9dd2-80924e02008b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.360328] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1234.360328] env[69994]: value = "task-3242758" [ 1234.360328] env[69994]: _type = "Task" [ 1234.360328] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.367623] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242758, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.442712] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-70fcf5b1-213f-4ff9-b675-282e7aa30e20-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.443039] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-70fcf5b1-213f-4ff9-b675-282e7aa30e20-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.443429] env[69994]: DEBUG nova.objects.instance [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'flavor' on Instance uuid 70fcf5b1-213f-4ff9-b675-282e7aa30e20 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.870254] env[69994]: DEBUG oslo_vmware.api [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242758, 'name': PowerOnVM_Task, 'duration_secs': 0.484522} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.870584] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1234.935427] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "6b73608e-b62f-4292-870c-51f1c686e569" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.935681] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "6b73608e-b62f-4292-870c-51f1c686e569" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.968643] env[69994]: DEBUG nova.compute.manager [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1234.969688] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-02b06dd7-6202-4349-a3e4-0bd9934c65cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.020999] env[69994]: DEBUG nova.objects.instance [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'pci_requests' on Instance uuid 70fcf5b1-213f-4ff9-b675-282e7aa30e20 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1235.437583] env[69994]: DEBUG nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1235.487480] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3e8f6cee-441e-4193-8b83-f3bac3c5b473 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.942s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.522849] env[69994]: DEBUG nova.objects.base [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Object Instance<70fcf5b1-213f-4ff9-b675-282e7aa30e20> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1235.523113] env[69994]: DEBUG nova.network.neutron [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1235.562370] env[69994]: DEBUG nova.policy [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1235.837847] env[69994]: DEBUG nova.network.neutron [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Successfully created port: c4c4e36c-239f-4506-bd4b-c442429d76fa {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1235.959866] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.960147] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.961627] env[69994]: INFO nova.compute.claims [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1236.418909] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.419258] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.419508] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.419711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.419910] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.422318] env[69994]: INFO nova.compute.manager [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Terminating instance [ 1236.926689] env[69994]: DEBUG nova.compute.manager [None req-20300fb7-2e00-439a-a634-a479773cba21 
tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1236.927023] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1236.927961] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58625576-5528-4eca-8e25-708149a76c1f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.936047] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.936291] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9608397-0a41-40b5-9664-8a90548d7766 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.942555] env[69994]: DEBUG oslo_vmware.api [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1236.942555] env[69994]: value = "task-3242759" [ 1236.942555] env[69994]: _type = "Task" [ 1236.942555] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.950075] env[69994]: DEBUG oslo_vmware.api [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242759, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.054029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35bfd90-5686-46cc-a7da-199aba7aad13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.061631] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3893db8-1cae-4696-9602-4afa9ac08044 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.093337] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ee7331-2eb6-4765-8858-027c5bdbedd1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.100975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c52af19-adc4-43c3-b8ff-01e86406674a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.114833] env[69994]: DEBUG nova.compute.provider_tree [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.199571] env[69994]: DEBUG nova.compute.manager [req-457e5b01-0515-4c2f-bc79-db3f1ffeccb6 req-fa23fd1a-9f51-4d9b-8313-8f04a378867e service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received event network-vif-plugged-c4c4e36c-239f-4506-bd4b-c442429d76fa {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1237.199798] env[69994]: DEBUG oslo_concurrency.lockutils [req-457e5b01-0515-4c2f-bc79-db3f1ffeccb6 req-fa23fd1a-9f51-4d9b-8313-8f04a378867e service nova] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.200016] env[69994]: DEBUG oslo_concurrency.lockutils [req-457e5b01-0515-4c2f-bc79-db3f1ffeccb6 req-fa23fd1a-9f51-4d9b-8313-8f04a378867e service nova] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.200171] env[69994]: DEBUG oslo_concurrency.lockutils [req-457e5b01-0515-4c2f-bc79-db3f1ffeccb6 req-fa23fd1a-9f51-4d9b-8313-8f04a378867e service nova] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.200373] env[69994]: DEBUG nova.compute.manager [req-457e5b01-0515-4c2f-bc79-db3f1ffeccb6 req-fa23fd1a-9f51-4d9b-8313-8f04a378867e service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] No waiting events found dispatching network-vif-plugged-c4c4e36c-239f-4506-bd4b-c442429d76fa {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1237.200490] env[69994]: 
WARNING nova.compute.manager [req-457e5b01-0515-4c2f-bc79-db3f1ffeccb6 req-fa23fd1a-9f51-4d9b-8313-8f04a378867e service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received unexpected event network-vif-plugged-c4c4e36c-239f-4506-bd4b-c442429d76fa for instance with vm_state active and task_state None. [ 1237.285387] env[69994]: DEBUG nova.network.neutron [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Successfully updated port: c4c4e36c-239f-4506-bd4b-c442429d76fa {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.454449] env[69994]: DEBUG oslo_vmware.api [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242759, 'name': PowerOffVM_Task, 'duration_secs': 0.199432} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.454449] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1237.454449] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1237.454449] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34aff4a8-7b92-4639-9821-8fd972afbc1d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.514138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1237.514359] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1237.514537] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleting the datastore file [datastore1] d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1237.514798] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d492f94-3472-4498-8679-87ea0d1401ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.521069] env[69994]: DEBUG oslo_vmware.api [None 
req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for the task: (returnval){ [ 1237.521069] env[69994]: value = "task-3242761" [ 1237.521069] env[69994]: _type = "Task" [ 1237.521069] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.528198] env[69994]: DEBUG oslo_vmware.api [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242761, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.618798] env[69994]: DEBUG nova.scheduler.client.report [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1237.787978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.788589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.788785] env[69994]: DEBUG nova.network.neutron [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1238.031020] env[69994]: DEBUG oslo_vmware.api [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242761, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.123477] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.124059] env[69994]: DEBUG nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1238.323030] env[69994]: WARNING nova.network.neutron [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. ignoring it [ 1238.531455] env[69994]: DEBUG oslo_vmware.api [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Task: {'id': task-3242761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.640173} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.531694] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.531877] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1238.532066] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1238.532263] env[69994]: INFO nova.compute.manager [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1238.532563] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.532759] env[69994]: DEBUG nova.compute.manager [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1238.532854] env[69994]: DEBUG nova.network.neutron [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1238.566097] env[69994]: DEBUG nova.network.neutron [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c4c4e36c-239f-4506-bd4b-c442429d76fa", "address": "fa:16:3e:ba:96:4c", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4c4e36c-23", "ovs_interfaceid": "c4c4e36c-239f-4506-bd4b-c442429d76fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.629063] env[69994]: DEBUG nova.compute.utils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 
tempest-ServerActionsTestOtherA-60618035-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1238.630036] env[69994]: DEBUG nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1238.630218] env[69994]: DEBUG nova.network.neutron [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1238.668017] env[69994]: DEBUG nova.policy [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64b979ffffc94e09bf911bdb89f4796a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccb64f97e46a4e499df974959db53dcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1238.924681] env[69994]: DEBUG nova.network.neutron [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Successfully created port: 7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1238.953504] env[69994]: DEBUG nova.compute.manager [req-c8219bda-a904-4ebd-9c55-56164bbdb3db req-ba96ff07-7b1b-48f6-a68c-61a4def58f41 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Received event network-vif-deleted-9e330706-3213-4a99-b48a-d2e09db34453 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1238.953504] env[69994]: INFO nova.compute.manager [req-c8219bda-a904-4ebd-9c55-56164bbdb3db req-ba96ff07-7b1b-48f6-a68c-61a4def58f41 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Neutron deleted interface 9e330706-3213-4a99-b48a-d2e09db34453; detaching it from the instance and deleting it from the info cache [ 1238.953504] env[69994]: DEBUG nova.network.neutron [req-c8219bda-a904-4ebd-9c55-56164bbdb3db req-ba96ff07-7b1b-48f6-a68c-61a4def58f41 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.068756] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.069436] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.069595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.070447] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5ae766-34c8-4c94-9aaf-f995fc0873ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.088470] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1239.088734] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1239.088925] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1239.089162] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1239.089349] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1239.089530] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1239.089773] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 
tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1239.089964] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1239.090252] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1239.090501] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1239.090679] env[69994]: DEBUG nova.virt.hardware [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1239.097127] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1239.097706] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4763c747-efab-4c4f-980b-f0effa7a7aeb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.115513] env[69994]: DEBUG oslo_vmware.api [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1239.115513] env[69994]: value = "task-3242762" [ 1239.115513] env[69994]: _type = "Task" [ 1239.115513] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.123300] env[69994]: DEBUG oslo_vmware.api [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242762, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.133062] env[69994]: DEBUG nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1239.227325] env[69994]: DEBUG nova.compute.manager [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received event network-changed-c4c4e36c-239f-4506-bd4b-c442429d76fa {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.227584] env[69994]: DEBUG nova.compute.manager [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Refreshing instance network info cache due to event network-changed-c4c4e36c-239f-4506-bd4b-c442429d76fa. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1239.227836] env[69994]: DEBUG oslo_concurrency.lockutils [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] Acquiring lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.228024] env[69994]: DEBUG oslo_concurrency.lockutils [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] Acquired lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.228226] env[69994]: DEBUG nova.network.neutron [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Refreshing network info cache for port c4c4e36c-239f-4506-bd4b-c442429d76fa {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1239.430047] env[69994]: DEBUG nova.network.neutron [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.456360] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d05ad978-0a04-4dc4-9bdc-549640feebbb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.466466] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa0de65-9de7-4ad8-a766-5c52a2509f48 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.499285] env[69994]: DEBUG nova.compute.manager [req-c8219bda-a904-4ebd-9c55-56164bbdb3db req-ba96ff07-7b1b-48f6-a68c-61a4def58f41 service nova] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Detach interface failed, port_id=9e330706-3213-4a99-b48a-d2e09db34453, reason: Instance d31f167f-8248-4aef-aa3c-6bc7259e1a80 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1239.626584] env[69994]: DEBUG oslo_vmware.api [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242762, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.924460] env[69994]: DEBUG nova.network.neutron [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updated VIF entry in instance network info cache for port c4c4e36c-239f-4506-bd4b-c442429d76fa. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1239.924914] env[69994]: DEBUG nova.network.neutron [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c4c4e36c-239f-4506-bd4b-c442429d76fa", "address": "fa:16:3e:ba:96:4c", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4c4e36c-23", "ovs_interfaceid": "c4c4e36c-239f-4506-bd4b-c442429d76fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.932890] env[69994]: INFO nova.compute.manager [-] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Took 1.40 seconds to deallocate network for instance. 
[ 1240.125523] env[69994]: DEBUG oslo_vmware.api [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242762, 'name': ReconfigVM_Task, 'duration_secs': 0.558644} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.126035] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.126269] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1240.145339] env[69994]: DEBUG nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1240.171374] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1240.171617] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1240.171908] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1240.171950] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1240.172227] env[69994]: DEBUG 
nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1240.172455] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1240.172681] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1240.172847] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1240.173213] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1240.173352] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1240.173565] env[69994]: DEBUG nova.virt.hardware [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1240.174586] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e319c617-bcd2-4476-938e-17ab62634237 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.183256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6555d483-bfd7-40d0-a326-bb85b2e2a8f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.345699] env[69994]: DEBUG nova.network.neutron [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Successfully updated port: 7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1240.427353] env[69994]: DEBUG oslo_concurrency.lockutils [req-b0fc028e-dbc4-43c3-8179-1e9c4ccd08e6 req-77df0acd-cef8-4fc9-9519-93011a7cdabe service nova] Releasing lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" 
{{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.439584] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.439873] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.440110] env[69994]: DEBUG nova.objects.instance [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lazy-loading 'resources' on Instance uuid d31f167f-8248-4aef-aa3c-6bc7259e1a80 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1240.631256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-8406543b-f4fb-4588-8b3d-97dbd6931403 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-70fcf5b1-213f-4ff9-b675-282e7aa30e20-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.188s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.848476] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.848646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.848767] env[69994]: DEBUG nova.network.neutron [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1241.021746] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3def23-6874-4eeb-862c-245f11fc812f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.028897] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf124c8f-453d-42e6-8599-146fa2c93141 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.059083] env[69994]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c985bb4-1d7e-4319-830b-b5aede7f33c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.066493] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b84bcfc-735a-4b2d-a5cf-dcb7b8c3375d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.079289] env[69994]: DEBUG nova.compute.provider_tree [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.251776] env[69994]: DEBUG nova.compute.manager [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Received event network-vif-plugged-7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1241.252020] env[69994]: DEBUG oslo_concurrency.lockutils [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] Acquiring lock "6b73608e-b62f-4292-870c-51f1c686e569-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.252204] env[69994]: DEBUG oslo_concurrency.lockutils [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] Lock "6b73608e-b62f-4292-870c-51f1c686e569-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.252321] env[69994]: DEBUG oslo_concurrency.lockutils [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] Lock "6b73608e-b62f-4292-870c-51f1c686e569-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.252486] env[69994]: DEBUG nova.compute.manager [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] No waiting events found dispatching network-vif-plugged-7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1241.252648] env[69994]: WARNING nova.compute.manager [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Received unexpected event network-vif-plugged-7ce56c69-d544-4ac1-b891-5678e0fd77ef for instance with vm_state building and task_state spawning. 
[ 1241.252904] env[69994]: DEBUG nova.compute.manager [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Received event network-changed-7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1241.253265] env[69994]: DEBUG nova.compute.manager [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Refreshing instance network info cache due to event network-changed-7ce56c69-d544-4ac1-b891-5678e0fd77ef. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1241.253553] env[69994]: DEBUG oslo_concurrency.lockutils [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] Acquiring lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.380564] env[69994]: DEBUG nova.network.neutron [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1241.508017] env[69994]: DEBUG nova.network.neutron [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Updating instance_info_cache with network_info: [{"id": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "address": "fa:16:3e:67:e1:bc", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ce56c69-d5", "ovs_interfaceid": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.582312] env[69994]: DEBUG nova.scheduler.client.report [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1242.010599] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.011017] env[69994]: DEBUG nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Instance network_info: |[{"id": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "address": "fa:16:3e:67:e1:bc", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ce56c69-d5", "ovs_interfaceid": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1242.011379] env[69994]: DEBUG oslo_concurrency.lockutils [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] Acquired lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.011575] env[69994]: DEBUG nova.network.neutron [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Refreshing network info cache for port 7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1242.012834] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:e1:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ce56c69-d544-4ac1-b891-5678e0fd77ef', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1242.020702] env[69994]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1242.023591] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1242.024242] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66f457b5-d9ee-4cb3-9836-d07384902531 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.043783] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1242.043783] env[69994]: value = "task-3242763" [ 1242.043783] env[69994]: _type = "Task" [ 1242.043783] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.051483] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242763, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.087336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.108746] env[69994]: INFO nova.scheduler.client.report [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Deleted allocations for instance d31f167f-8248-4aef-aa3c-6bc7259e1a80 [ 1242.229713] env[69994]: DEBUG nova.network.neutron [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Updated VIF entry in instance network info cache for port 7ce56c69-d544-4ac1-b891-5678e0fd77ef. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1242.230185] env[69994]: DEBUG nova.network.neutron [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Updating instance_info_cache with network_info: [{"id": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "address": "fa:16:3e:67:e1:bc", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ce56c69-d5", "ovs_interfaceid": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.264165] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-70fcf5b1-213f-4ff9-b675-282e7aa30e20-c4c4e36c-239f-4506-bd4b-c442429d76fa" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.264496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-70fcf5b1-213f-4ff9-b675-282e7aa30e20-c4c4e36c-239f-4506-bd4b-c442429d76fa" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.554348] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242763, 'name': CreateVM_Task, 'duration_secs': 0.336962} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.554549] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1242.555364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.555609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.556043] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1242.556382] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9947fbfa-a054-49a4-89c6-1f7a3e911e04 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.561584] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1242.561584] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520510e8-a737-d2b4-e1cb-79f02130d69d" [ 1242.561584] env[69994]: _type = "Task" [ 1242.561584] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.570881] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520510e8-a737-d2b4-e1cb-79f02130d69d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.617009] env[69994]: DEBUG oslo_concurrency.lockutils [None req-20300fb7-2e00-439a-a634-a479773cba21 tempest-ServerActionsTestOtherB-1755477021 tempest-ServerActionsTestOtherB-1755477021-project-member] Lock "d31f167f-8248-4aef-aa3c-6bc7259e1a80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.198s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.732710] env[69994]: DEBUG oslo_concurrency.lockutils [req-e0bfb4ae-115a-4482-a1be-3428ed834e25 req-6e3a2cc5-3b25-4369-ac68-b9550978a89b service nova] Releasing lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.766919] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.767119] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.767983] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c3e7b0-bfde-4c42-9ef3-9094703f3c02 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.785745] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cebb2c-0e46-470a-b5ce-5558db20d5fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.812056] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1242.812290] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f954b18f-729c-457b-8a32-8b2e2207f47e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.830235] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1242.830235] env[69994]: value = "task-3242765" [ 1242.830235] env[69994]: _type = "Task" [ 1242.830235] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.837993] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.071310] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520510e8-a737-d2b4-e1cb-79f02130d69d, 'name': SearchDatastore_Task, 'duration_secs': 0.011925} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.071553] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.071778] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1243.072016] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.072163] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.072342] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1243.072590] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afe3748a-f6d3-4b30-8abc-88232748280b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.080753] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1243.080918] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1243.081608] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdd64052-d347-42ad-973a-9c3b82a5db25 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.086331] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1243.086331] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5240b1ed-e83d-35d7-d525-f5af0949ef5d" [ 1243.086331] env[69994]: _type = "Task" [ 1243.086331] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.094814] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5240b1ed-e83d-35d7-d525-f5af0949ef5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.339909] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.597056] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5240b1ed-e83d-35d7-d525-f5af0949ef5d, 'name': SearchDatastore_Task, 'duration_secs': 0.007333} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.597770] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4a1d80-b4bb-4c83-bc7f-541058521af4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.602542] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1243.602542] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a1da3c-5172-1c75-0d70-b6c59729b831" [ 1243.602542] env[69994]: _type = "Task" [ 1243.602542] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.609708] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a1da3c-5172-1c75-0d70-b6c59729b831, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.840417] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.112639] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a1da3c-5172-1c75-0d70-b6c59729b831, 'name': SearchDatastore_Task, 'duration_secs': 0.009435} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.112894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.113196] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 6b73608e-b62f-4292-870c-51f1c686e569/6b73608e-b62f-4292-870c-51f1c686e569.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1244.113439] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e7f264a-1f07-49c3-a3bd-8abe833e0bc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.120773] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1244.120773] env[69994]: value = "task-3242766" [ 1244.120773] env[69994]: _type = "Task" [ 1244.120773] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.128462] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242766, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.343285] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.630752] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.434966} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.631019] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 6b73608e-b62f-4292-870c-51f1c686e569/6b73608e-b62f-4292-870c-51f1c686e569.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1244.631245] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1244.631496] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4226f65-5507-46d5-9213-b443d55d787d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.638176] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1244.638176] env[69994]: value = "task-3242767" [ 1244.638176] env[69994]: _type = "Task" [ 1244.638176] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.645340] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242767, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.841646] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.147461] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059082} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.147751] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1245.148521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6f2b0b-34b5-4771-90c8-0f8d51d8f92e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.170131] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 6b73608e-b62f-4292-870c-51f1c686e569/6b73608e-b62f-4292-870c-51f1c686e569.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1245.170355] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad9274ff-a963-4d44-ab12-bcb7900a11b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.188876] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1245.188876] env[69994]: value = "task-3242768" [ 1245.188876] env[69994]: _type = "Task" [ 1245.188876] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.196485] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.342184] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.698824] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242768, 'name': ReconfigVM_Task, 'duration_secs': 0.281727} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.699118] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 6b73608e-b62f-4292-870c-51f1c686e569/6b73608e-b62f-4292-870c-51f1c686e569.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1245.699659] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53706d57-0305-4933-b389-56bcfacac97c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.705839] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1245.705839] env[69994]: value = "task-3242769" [ 1245.705839] env[69994]: _type = "Task" [ 1245.705839] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.714252] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242769, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.842988] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.215241] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242769, 'name': Rename_Task, 'duration_secs': 0.142956} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.215602] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1246.215846] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-928d25bd-5265-41a9-be76-0686674a3fed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.222111] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1246.222111] env[69994]: value = "task-3242770" [ 1246.222111] env[69994]: _type = "Task" [ 1246.222111] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.230300] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.349469] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.732600] env[69994]: DEBUG oslo_vmware.api [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242770, 'name': PowerOnVM_Task, 'duration_secs': 0.427767} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.736156] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1246.736156] env[69994]: INFO nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Took 6.59 seconds to spawn the instance on the hypervisor. [ 1246.736156] env[69994]: DEBUG nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1246.736156] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a6bd65-e996-442a-a09c-b9bfaee26324 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.845231] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.255043] env[69994]: INFO nova.compute.manager [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Took 11.31 seconds to build instance. [ 1247.351577] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.758049] env[69994]: DEBUG oslo_concurrency.lockutils [None req-04e8fc0a-4fa2-49df-bf53-f3083fed36f4 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "6b73608e-b62f-4292-870c-51f1c686e569" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.821s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.848130] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.027014] env[69994]: DEBUG nova.compute.manager [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Received event network-changed-7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1248.027264] env[69994]: DEBUG nova.compute.manager [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Refreshing instance network info cache due to event network-changed-7ce56c69-d544-4ac1-b891-5678e0fd77ef. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1248.027486] env[69994]: DEBUG oslo_concurrency.lockutils [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] Acquiring lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.027641] env[69994]: DEBUG oslo_concurrency.lockutils [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] Acquired lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.027807] env[69994]: DEBUG nova.network.neutron [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Refreshing network info cache for port 7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1248.347916] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.746346] env[69994]: DEBUG nova.network.neutron [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Updated VIF entry in instance network info cache for port 7ce56c69-d544-4ac1-b891-5678e0fd77ef. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1248.746705] env[69994]: DEBUG nova.network.neutron [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Updating instance_info_cache with network_info: [{"id": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "address": "fa:16:3e:67:e1:bc", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ce56c69-d5", "ovs_interfaceid": "7ce56c69-d544-4ac1-b891-5678e0fd77ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.770405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.770663] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.848435] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.249466] env[69994]: DEBUG oslo_concurrency.lockutils [req-426245c2-b832-4174-bb39-46506af2aa92 req-86e91491-cea1-43ed-9d1c-aa12f8b85968 service nova] Releasing lock "refresh_cache-6b73608e-b62f-4292-870c-51f1c686e569" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.272898] env[69994]: DEBUG nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1249.350141] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.494141] env[69994]: DEBUG nova.compute.manager [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1249.794741] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.795025] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.796479] env[69994]: INFO nova.compute.claims [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1249.849830] env[69994]: DEBUG oslo_vmware.api [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242765, 'name': ReconfigVM_Task, 'duration_secs': 6.535125} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.850062] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.850268] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1250.011054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.108668] env[69994]: DEBUG nova.compute.manager [req-4608f9ac-c7b3-4a45-8048-2e8206e54618 req-5082f4d5-4f15-4aa5-965c-27d9f428d8de service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received event network-vif-deleted-c4c4e36c-239f-4506-bd4b-c442429d76fa {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.108739] env[69994]: INFO nova.compute.manager [req-4608f9ac-c7b3-4a45-8048-2e8206e54618 req-5082f4d5-4f15-4aa5-965c-27d9f428d8de service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Neutron deleted interface c4c4e36c-239f-4506-bd4b-c442429d76fa; detaching it from the instance and deleting it from the info cache [ 1250.108976] env[69994]: DEBUG nova.network.neutron [req-4608f9ac-c7b3-4a45-8048-2e8206e54618 req-5082f4d5-4f15-4aa5-965c-27d9f428d8de service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1250.612403] env[69994]: DEBUG oslo_concurrency.lockutils [req-4608f9ac-c7b3-4a45-8048-2e8206e54618 req-5082f4d5-4f15-4aa5-965c-27d9f428d8de service nova] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.612403] env[69994]: DEBUG oslo_concurrency.lockutils [req-4608f9ac-c7b3-4a45-8048-2e8206e54618 req-5082f4d5-4f15-4aa5-965c-27d9f428d8de service nova] Acquired lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.612840] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3dce41-9278-41db-ae7b-29f7c125e06a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.634849] env[69994]: DEBUG oslo_concurrency.lockutils [req-4608f9ac-c7b3-4a45-8048-2e8206e54618 req-5082f4d5-4f15-4aa5-965c-27d9f428d8de service nova] Releasing lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.634849] env[69994]: WARNING nova.compute.manager [req-4608f9ac-c7b3-4a45-8048-2e8206e54618 req-5082f4d5-4f15-4aa5-965c-27d9f428d8de service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Detach interface failed, port_id=c4c4e36c-239f-4506-bd4b-c442429d76fa, reason: No device with interface-id c4c4e36c-239f-4506-bd4b-c442429d76fa exists on VM: nova.exception.NotFound: No device with interface-id c4c4e36c-239f-4506-bd4b-c442429d76fa exists on VM [ 1250.891712] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded5cff0-b5ff-4005-983d-e982290961ed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.899861] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8286140-3cf7-4c81-a9e3-3dbecb7e9c33 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.929957] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3385e270-94b9-4c50-8e1a-71129c74a5d4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.936569] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea9b7b2-c0e3-4e5d-9b8f-35cad2426e78 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.949459] env[69994]: DEBUG nova.compute.provider_tree [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.074609] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.074900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.075161] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.075365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.075668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.077996] env[69994]: INFO nova.compute.manager [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Terminating instance [ 1251.092115] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.092445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.092534] env[69994]: DEBUG nova.network.neutron [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1251.452793] env[69994]: DEBUG nova.scheduler.client.report [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 
tempest-ServersAaction247Test-1942762578-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1251.582030] env[69994]: DEBUG nova.compute.manager [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1251.582275] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1251.583250] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61e536e-89ac-4db3-956e-35e6f05e6f3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.592364] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1251.592599] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5b9cc42-ccdf-4022-af81-a23274ffb042 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.598352] env[69994]: DEBUG oslo_vmware.api [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1251.598352] env[69994]: value = "task-3242772" [ 1251.598352] env[69994]: _type = "Task" [ 1251.598352] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.606358] env[69994]: DEBUG oslo_vmware.api [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242772, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.842131] env[69994]: DEBUG nova.network.neutron [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [{"id": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "address": "fa:16:3e:2d:da:8e", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19e03de0-48", "ovs_interfaceid": "19e03de0-48c7-4499-a84b-4e2bf08e38e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.958722] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.959490] env[69994]: DEBUG nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1251.963330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.952s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.107854] env[69994]: DEBUG oslo_vmware.api [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242772, 'name': PowerOffVM_Task, 'duration_secs': 0.214326} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.108170] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1252.108358] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.108599] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ebbddea-d629-4f64-a6de-4b1d0c1fa542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.172656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1252.172859] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1252.173060] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleting the datastore file [datastore2] 70fcf5b1-213f-4ff9-b675-282e7aa30e20 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1252.173397] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-515ca246-c0bc-40da-a527-5c711379f29f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.179932] env[69994]: DEBUG oslo_vmware.api [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1252.179932] env[69994]: value = "task-3242775" [ 1252.179932] env[69994]: _type = "Task" [ 1252.179932] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.187277] env[69994]: DEBUG oslo_vmware.api [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242775, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.344877] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-70fcf5b1-213f-4ff9-b675-282e7aa30e20" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.467318] env[69994]: DEBUG nova.compute.utils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1252.470725] env[69994]: INFO nova.compute.claims [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1252.474231] env[69994]: DEBUG nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Not allocating networking since 'none' was specified. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1252.692013] env[69994]: DEBUG oslo_vmware.api [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242775, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.849109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1768ab89-40f0-4604-99bc-652677c43d63 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-70fcf5b1-213f-4ff9-b675-282e7aa30e20-c4c4e36c-239f-4506-bd4b-c442429d76fa" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.585s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.975268] env[69994]: DEBUG nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1252.979723] env[69994]: INFO nova.compute.resource_tracker [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating resource usage from migration 2bfc809a-fb4d-4b03-a5ba-2be988727a62 [ 1253.089501] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354d8aaf-dda6-4db4-83f5-9abdaba3ec4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.098257] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e2f018-10ee-4ebf-b1d2-7a53c03cd382 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.130944] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75d7840-ed13-4fb0-af46-4fdbb369e5c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.139764] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdef29a3-a8b7-4ac9-b836-d4e91af0035a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.154439] env[69994]: DEBUG nova.compute.provider_tree [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.191049] env[69994]: DEBUG oslo_vmware.api [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.805516} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.191306] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.191493] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.191667] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.191836] env[69994]: INFO nova.compute.manager [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1253.192088] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.192282] env[69994]: DEBUG nova.compute.manager [-] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1253.192376] env[69994]: DEBUG nova.network.neutron [-] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1253.593568] env[69994]: DEBUG nova.compute.manager [req-3364657c-78d0-42a8-809c-d8753d7f87b9 req-86058095-2ae5-42a8-9556-6a24c6944fd2 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Received event network-vif-deleted-19e03de0-48c7-4499-a84b-4e2bf08e38e7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1253.593568] env[69994]: INFO nova.compute.manager [req-3364657c-78d0-42a8-809c-d8753d7f87b9 req-86058095-2ae5-42a8-9556-6a24c6944fd2 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Neutron deleted interface 19e03de0-48c7-4499-a84b-4e2bf08e38e7; detaching it from the instance and deleting it from the info cache [ 1253.593689] env[69994]: DEBUG nova.network.neutron [req-3364657c-78d0-42a8-809c-d8753d7f87b9 req-86058095-2ae5-42a8-9556-6a24c6944fd2 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.657244] env[69994]: DEBUG nova.scheduler.client.report [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1253.994801] env[69994]: DEBUG nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1254.022541] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1254.022793] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1254.022953] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1254.023223] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1254.023403] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1254.023557] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1254.023774] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1254.024237] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1254.024361] env[69994]: DEBUG nova.virt.hardware [None 
req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1254.024791] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1254.024791] env[69994]: DEBUG nova.virt.hardware [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1254.026017] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21298b76-7b4c-4789-91bb-7cf5e7fe77ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.035319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3efb84-b712-499a-8649-9897a85b278e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.050423] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Instance VIF info [] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1254.056129] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Creating folder: Project (d7292e3f011f43d78143b55c8067cb7d). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1254.056475] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63e28596-14f6-4574-965b-5b8fcae3a07c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.070745] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Created folder: Project (d7292e3f011f43d78143b55c8067cb7d) in parent group-v647729. [ 1254.070969] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Creating folder: Instances. Parent ref: group-v648047. 
{{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1254.071302] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c2c248b-d95a-43f1-940d-54eb623ad7ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.073337] env[69994]: DEBUG nova.network.neutron [-] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.082896] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Created folder: Instances in parent group-v648047. [ 1254.083217] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1254.083995] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1254.084251] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c35cf86a-4981-4cca-97c0-09b836ad8b4b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.098152] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3053cb14-ef67-4b5c-8429-3f90ae0d2f3f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.109386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9391e4fa-d3c4-472d-9221-285b402b33f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.120025] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1254.120025] env[69994]: value = "task-3242779" [ 1254.120025] env[69994]: _type = "Task" [ 1254.120025] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.129578] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242779, 'name': CreateVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.139179] env[69994]: DEBUG nova.compute.manager [req-3364657c-78d0-42a8-809c-d8753d7f87b9 req-86058095-2ae5-42a8-9556-6a24c6944fd2 service nova] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Detach interface failed, port_id=19e03de0-48c7-4499-a84b-4e2bf08e38e7, reason: Instance 70fcf5b1-213f-4ff9-b675-282e7aa30e20 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1254.162925] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.199s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.162925] env[69994]: INFO nova.compute.manager [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Migrating [ 1254.577807] env[69994]: INFO nova.compute.manager [-] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Took 1.39 seconds to deallocate network for instance. [ 1254.630871] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242779, 'name': CreateVM_Task, 'duration_secs': 0.284585} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.631044] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1254.631465] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.631621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.631947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1254.632210] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ccd0bd7-2293-4490-adb3-27aa707f0482 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.637406] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1254.637406] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52eacf42-78e8-06db-fec9-96b899157aca" [ 1254.637406] env[69994]: _type = "Task" [ 1254.637406] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.646238] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52eacf42-78e8-06db-fec9-96b899157aca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.678431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.678658] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.678795] env[69994]: DEBUG nova.network.neutron [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1255.084819] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.085216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.085216] env[69994]: DEBUG nova.objects.instance [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'resources' on Instance uuid 70fcf5b1-213f-4ff9-b675-282e7aa30e20 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.147829] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52eacf42-78e8-06db-fec9-96b899157aca, 'name': SearchDatastore_Task, 'duration_secs': 0.011016} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.148134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.148372] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1255.148602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.148747] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.148922] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1255.149202] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40438485-6b35-44cd-a95c-62b68cd5df31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.158677] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1255.158885] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1255.159615] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15602eb1-5361-4696-a910-965d069d619d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.165353] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1255.165353] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520a8658-fe1f-61cc-1a7b-cb0f4d3f000a" [ 1255.165353] env[69994]: _type = "Task" [ 1255.165353] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.172891] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520a8658-fe1f-61cc-1a7b-cb0f4d3f000a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.388051] env[69994]: DEBUG nova.network.neutron [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.678531] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520a8658-fe1f-61cc-1a7b-cb0f4d3f000a, 'name': SearchDatastore_Task, 'duration_secs': 0.009441} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.679248] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbc9bb5b-8216-4eef-a18d-d05b9359a321 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.684612] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1255.684612] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c2f12a-12a1-7aa4-425f-ffbc4f8ef09b" [ 1255.684612] env[69994]: _type = "Task" [ 1255.684612] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.689071] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd3ae6c-c7ce-498f-99d9-96fdc0275268 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.696524] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c2f12a-12a1-7aa4-425f-ffbc4f8ef09b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.699527] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6253e1-7750-464c-8bc8-41c9e8dc00de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.730055] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec91177c-c609-4ef3-bd6f-55ce7d13f253 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.738404] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fbc9c7-ea0c-477a-bf32-cf3ececac3c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.752426] env[69994]: DEBUG nova.compute.provider_tree [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.891281] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.195566] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c2f12a-12a1-7aa4-425f-ffbc4f8ef09b, 'name': SearchDatastore_Task, 'duration_secs': 0.013897} 
completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.195566] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.195806] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff/b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1256.195994] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-251c2d6c-3fa4-4d0c-aee2-b89cac4cfb76 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.203413] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1256.203413] env[69994]: value = "task-3242781" [ 1256.203413] env[69994]: _type = "Task" [ 1256.203413] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.213404] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.256333] env[69994]: DEBUG nova.scheduler.client.report [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1256.713908] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477318} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.714263] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff/b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1256.714523] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1256.714774] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-228511ca-acf3-4916-940e-79170e687560 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.721999] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1256.721999] env[69994]: value = "task-3242782" [ 1256.721999] env[69994]: _type = "Task" [ 1256.721999] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.729630] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.761690] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.778465] env[69994]: INFO nova.scheduler.client.report [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted allocations for instance 70fcf5b1-213f-4ff9-b675-282e7aa30e20 [ 1257.231919] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061244} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.232409] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1257.233204] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c543c3-3e6c-4207-9367-9efaa3a547ab {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.253567] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff/b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1257.253930] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06bb7500-f2d9-4749-9b42-e548653c3f4f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.274978] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1257.274978] env[69994]: value = "task-3242783" [ 1257.274978] env[69994]: _type = "Task" [ 1257.274978] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.284820] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242783, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.285275] env[69994]: DEBUG oslo_concurrency.lockutils [None req-3dfcc7c8-17b0-4d80-8ef7-d7aca755bb8d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "70fcf5b1-213f-4ff9-b675-282e7aa30e20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.210s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.406058] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1425d3-6edf-4026-801d-b9f82f2f13b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.426261] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance 'eea243fb-97fc-4c65-8699-1b3c321bd250' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1257.785248] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242783, 'name': ReconfigVM_Task, 'duration_secs': 0.284326} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.785551] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Reconfigured VM instance instance-0000006e to attach disk [datastore2] b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff/b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.786176] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2ad6201-4d50-40eb-ac55-44b3ae9a4778 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.793155] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1257.793155] env[69994]: value = "task-3242784" [ 1257.793155] env[69994]: _type = "Task" [ 1257.793155] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.801290] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242784, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.932603] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1257.932966] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1d1201d-716e-4523-a952-2f57563ae678 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.942253] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1257.942253] env[69994]: value = "task-3242785" [ 1257.942253] env[69994]: _type = "Task" [ 1257.942253] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.304177] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242784, 'name': Rename_Task, 'duration_secs': 0.148155} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.304555] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1258.304698] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-178de8c5-d49d-46d7-b28e-0ad07b868a42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.311621] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1258.311621] env[69994]: value = "task-3242787" [ 1258.311621] env[69994]: _type = "Task" [ 1258.311621] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.319586] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242787, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.452538] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242785, 'name': PowerOffVM_Task, 'duration_secs': 0.230048} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.453008] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1258.453079] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance 'eea243fb-97fc-4c65-8699-1b3c321bd250' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1258.822730] env[69994]: DEBUG oslo_vmware.api [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242787, 'name': PowerOnVM_Task, 'duration_secs': 0.451066} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.822950] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.823220] env[69994]: INFO nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Took 4.83 seconds to spawn the instance on the hypervisor. 
[ 1258.823408] env[69994]: DEBUG nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1258.824170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130af61f-fb23-4e66-899b-c866df1080dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.960576] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1258.960901] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1258.960997] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1258.961246] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1258.961394] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1258.961533] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1258.961784] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1258.961927] env[69994]: DEBUG 
nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1258.962112] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1258.962278] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1258.962450] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1258.967689] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6435b895-9651-4e73-8783-b08f4b69a5b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.984567] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1258.984567] env[69994]: value = "task-3242788" [ 1258.984567] env[69994]: _type = "Task" [ 1258.984567] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.993949] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242788, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.117139] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.117392] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "1735049d-a240-48fc-a360-3b00b02225b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.339996] env[69994]: INFO nova.compute.manager [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Took 9.56 seconds to build instance. [ 1259.494626] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242788, 'name': ReconfigVM_Task, 'duration_secs': 0.178903} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.494937] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance 'eea243fb-97fc-4c65-8699-1b3c321bd250' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1259.619810] env[69994]: DEBUG nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1259.842101] env[69994]: DEBUG oslo_concurrency.lockutils [None req-aec06623-28a7-47d3-a84b-4747f472a934 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.071s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.001272] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1260.001545] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1260.001703] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1260.001895] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1260.002069] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1260.002241] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1260.002451] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1260.002610] env[69994]: DEBUG nova.virt.hardware [None 
req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1260.002774] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1260.002934] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1260.003130] env[69994]: DEBUG nova.virt.hardware [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1260.009102] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1260.009428] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cf69840-11eb-469a-b524-a782f51f31c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.029549] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1260.029549] env[69994]: value = "task-3242789" [ 1260.029549] env[69994]: _type = "Task" [ 1260.029549] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.040107] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242789, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.140719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.140992] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.142535] env[69994]: INFO nova.compute.claims [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1260.539820] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242789, 'name': ReconfigVM_Task, 'duration_secs': 0.274208} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.540322] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1260.541096] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f490d0-b844-4b96-86a1-77336507da8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.564557] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1260.564804] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d88f7a4c-0d15-40c3-82e4-e6a2ac166767 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.583505] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1260.583505] env[69994]: value = "task-3242791" [ 1260.583505] env[69994]: _type = "Task" [ 1260.583505] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.591422] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242791, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.766189] env[69994]: DEBUG nova.compute.manager [None req-d36a33b8-d99a-4e35-a2a4-5187424f6a89 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1260.767156] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7677b1b-a5d9-44c2-8b69-804fb3d028aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.833069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.833069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.833069] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.833377] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.833556] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.835845] env[69994]: INFO nova.compute.manager [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 
tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Terminating instance [ 1261.094012] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242791, 'name': ReconfigVM_Task, 'duration_secs': 0.273639} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.094330] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfigured VM instance instance-0000006c to attach disk [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1261.094589] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance 'eea243fb-97fc-4c65-8699-1b3c321bd250' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1261.243477] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be647ca-e252-4129-93c9-9cddcb96006f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.251293] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593e21f1-f59b-4e90-9b9a-1cb0d4211faa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.281727] env[69994]: INFO nova.compute.manager [None req-d36a33b8-d99a-4e35-a2a4-5187424f6a89 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] instance snapshotting [ 1261.282278] env[69994]: DEBUG nova.objects.instance [None req-d36a33b8-d99a-4e35-a2a4-5187424f6a89 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lazy-loading 'flavor' on Instance uuid b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1261.284166] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213f98e0-1329-4835-adfd-856524a5eccc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.292097] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb654d0b-26cb-4d4b-8f05-fb2c0a878043 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.306125] env[69994]: DEBUG nova.compute.provider_tree [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.340200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "refresh_cache-b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.340384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquired lock "refresh_cache-b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.340569] env[69994]: DEBUG nova.network.neutron [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.601251] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1ea7d1-affd-4014-9b97-6a59937c7707 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.620499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459ffffa-7ea2-4225-a5ec-058028e86a7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.639434] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance 'eea243fb-97fc-4c65-8699-1b3c321bd250' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1261.790582] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ab8307-970a-46a3-bfd7-7e53c1ea4a5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.808226] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46bb818-72b1-4ca7-a53f-8a5a7898e94c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.811355] env[69994]: DEBUG nova.scheduler.client.report [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1261.859458] env[69994]: DEBUG nova.network.neutron [None req-ed52ed66-5152-4425-97df-efe976feadd3 
tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1261.915130] env[69994]: DEBUG nova.network.neutron [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.180089] env[69994]: DEBUG nova.network.neutron [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Port a8446ab9-60ee-4fc2-8b75-e53b3b39a38f binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1262.316728] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.317394] env[69994]: DEBUG nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1262.323054] env[69994]: DEBUG nova.compute.manager [None req-d36a33b8-d99a-4e35-a2a4-5187424f6a89 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Instance disappeared during snapshot {{(pid=69994) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1262.418318] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Releasing lock "refresh_cache-b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.418765] env[69994]: DEBUG nova.compute.manager [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1262.418962] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1262.419824] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dc41e2-0060-4420-9c83-2deef4d1647a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.430922] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1262.431216] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39e69c75-a0e3-41c8-b58b-4d141e2fd82a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.437461] env[69994]: DEBUG oslo_vmware.api [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1262.437461] env[69994]: value = "task-3242793" [ 1262.437461] env[69994]: _type = "Task" [ 1262.437461] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.445720] env[69994]: DEBUG oslo_vmware.api [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.446804] env[69994]: DEBUG nova.compute.manager [None req-d36a33b8-d99a-4e35-a2a4-5187424f6a89 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Found 0 images (rotation: 2) {{(pid=69994) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1262.822646] env[69994]: DEBUG nova.compute.utils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1262.824067] env[69994]: DEBUG nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1262.824319] env[69994]: DEBUG nova.network.neutron [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1262.872731] env[69994]: DEBUG nova.policy [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1262.949899] env[69994]: DEBUG oslo_vmware.api [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242793, 'name': PowerOffVM_Task, 'duration_secs': 0.113011} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.950172] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1262.950341] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1262.950592] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fbd3b85-57c8-4e86-81f3-49a10bff3f4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.979873] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1262.980279] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1262.980589] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Deleting the datastore file 
[datastore2] b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1262.981011] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ed99fff-5a82-4835-acd5-da74630c41a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.988677] env[69994]: DEBUG oslo_vmware.api [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for the task: (returnval){ [ 1262.988677] env[69994]: value = "task-3242795" [ 1262.988677] env[69994]: _type = "Task" [ 1262.988677] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.997105] env[69994]: DEBUG oslo_vmware.api [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242795, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.148031] env[69994]: DEBUG nova.network.neutron [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Successfully created port: 657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1263.201850] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.202045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.202284] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.327898] env[69994]: DEBUG nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1263.498541] env[69994]: DEBUG oslo_vmware.api [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Task: {'id': task-3242795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096457} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.498780] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1263.498960] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1263.499151] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1263.499323] env[69994]: INFO nova.compute.manager [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1263.499561] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1263.499749] env[69994]: DEBUG nova.compute.manager [-] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1263.499844] env[69994]: DEBUG nova.network.neutron [-] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1263.515160] env[69994]: DEBUG nova.network.neutron [-] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1264.017879] env[69994]: DEBUG nova.network.neutron [-] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.237363] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.237552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.237729] env[69994]: DEBUG nova.network.neutron [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1264.337067] env[69994]: DEBUG nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1264.365939] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1264.366215] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1264.366365] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1264.366546] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1264.366692] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1264.366951] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1264.367059] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1264.367210] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1264.367376] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1264.367540] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1264.367705] env[69994]: DEBUG nova.virt.hardware [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1264.368599] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f9792c-7d92-4bcd-bf70-9b027d06eca2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.376629] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47f0400-81a6-460f-8af0-4e111416b115 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.486719] env[69994]: DEBUG nova.compute.manager [req-9ac5e107-9656-40e2-8dc1-e11542da78cb req-71cae101-93a9-48e5-96b6-77dc7e3a3ae8 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event 
network-vif-plugged-657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1264.486942] env[69994]: DEBUG oslo_concurrency.lockutils [req-9ac5e107-9656-40e2-8dc1-e11542da78cb req-71cae101-93a9-48e5-96b6-77dc7e3a3ae8 service nova] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.487167] env[69994]: DEBUG oslo_concurrency.lockutils [req-9ac5e107-9656-40e2-8dc1-e11542da78cb req-71cae101-93a9-48e5-96b6-77dc7e3a3ae8 service nova] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.487407] env[69994]: DEBUG oslo_concurrency.lockutils [req-9ac5e107-9656-40e2-8dc1-e11542da78cb req-71cae101-93a9-48e5-96b6-77dc7e3a3ae8 service nova] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.487511] env[69994]: DEBUG nova.compute.manager [req-9ac5e107-9656-40e2-8dc1-e11542da78cb req-71cae101-93a9-48e5-96b6-77dc7e3a3ae8 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] No waiting events found dispatching network-vif-plugged-657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1264.487644] env[69994]: WARNING nova.compute.manager [req-9ac5e107-9656-40e2-8dc1-e11542da78cb req-71cae101-93a9-48e5-96b6-77dc7e3a3ae8 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received unexpected event network-vif-plugged-657bb865-1c59-4abc-b02a-bb91154c3cd9 for instance with vm_state building and task_state spawning. [ 1264.520032] env[69994]: INFO nova.compute.manager [-] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Took 1.02 seconds to deallocate network for instance. 
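[annotation] The preceding entries show the per-instance event handling pattern: the service lock "1735049d-...-events" is taken, pop_instance_event finds no registered waiter, and the network-vif-plugged event is logged as unexpected. Below is a minimal illustrative sketch of that locking pattern, not Nova's actual implementation; the _events map and pop_instance_event helper are hypothetical stand-ins, only the oslo.concurrency lock context manager is a real API.

    # sketch: guard a per-instance event table with an oslo.concurrency lock,
    # mirroring the "<uuid>-events" lock names seen in the log above
    from collections import defaultdict
    from oslo_concurrency import lockutils

    _events = defaultdict(dict)  # hypothetical: instance_uuid -> {event_name: waiter}

    def pop_instance_event(instance_uuid, event_name):
        # returns the waiter for this event, or None -- the None case corresponds
        # to the "No waiting events found dispatching ..." entry in the log
        with lockutils.lock(f"{instance_uuid}-events"):
            return _events[instance_uuid].pop(event_name, None)
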
[ 1264.947861] env[69994]: DEBUG nova.network.neutron [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.026183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.026470] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.026675] env[69994]: DEBUG nova.objects.instance [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lazy-loading 'resources' on Instance uuid b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.042035] env[69994]: DEBUG nova.network.neutron [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Successfully updated port: 657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1265.060211] env[69994]: DEBUG nova.compute.manager [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-changed-657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1265.060211] env[69994]: DEBUG nova.compute.manager [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing instance network info cache due to event network-changed-657bb865-1c59-4abc-b02a-bb91154c3cd9. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1265.060211] env[69994]: DEBUG oslo_concurrency.lockutils [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.060211] env[69994]: DEBUG oslo_concurrency.lockutils [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1265.060211] env[69994]: DEBUG nova.network.neutron [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing network info cache for port 657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1265.146095] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.146095] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.146095] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1265.450419] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.546012] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.591847] env[69994]: DEBUG nova.network.neutron [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1265.624826] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73e454b-f13c-48e2-ade7-74d65cd39dca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.635232] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3260dd4-ff56-4618-965b-c76e39582613 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.666577] env[69994]: DEBUG nova.network.neutron [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1265.669127] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f36fa9-9bef-462c-80fe-cd301a7a00d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.678706] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c276c8-17c1-47e7-9a28-5afd5df5712d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.692882] env[69994]: DEBUG nova.compute.provider_tree [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.970908] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fc1126-c77f-4590-814d-486526e4c75c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.990422] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a546ea7-e96b-4c75-98b6-fd9e94fb38d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.997517] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance 'eea243fb-97fc-4c65-8699-1b3c321bd250' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1266.145712] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.146017] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.146112] env[69994]: DEBUG oslo_service.periodic_task [None 
req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.173073] env[69994]: DEBUG oslo_concurrency.lockutils [req-489709c8-9d72-49af-b8e3-01229a1a7db1 req-0392b659-9ddf-4297-9ddf-afc1c27df5e3 service nova] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1266.173450] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1266.173618] env[69994]: DEBUG nova.network.neutron [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1266.195371] env[69994]: DEBUG nova.scheduler.client.report [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1266.504234] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1266.504496] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ab0923c-4fb6-497e-ac55-03a12608be21 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.512491] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1266.512491] env[69994]: value = "task-3242797" [ 1266.512491] env[69994]: _type = "Task" [ 1266.512491] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.520515] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242797, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.648969] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.699619] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.701880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.053s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.702076] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.702424] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1266.703101] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d38a150-ec2e-4677-ace7-4c4dded86a43 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.706339] env[69994]: DEBUG nova.network.neutron [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1266.714090] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493dbb6b-1c53-4f4d-946f-fc32159cb012 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.720009] env[69994]: INFO nova.scheduler.client.report [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Deleted allocations for instance b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff [ 1266.736231] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8928ae06-e187-4d7c-8267-b525a9c89214 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.744672] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f16c376-795f-49a8-8d6a-364f30849967 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.775667] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179988MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1266.775857] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.776103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.881166] env[69994]: DEBUG nova.network.neutron [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": 
"657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.022690] env[69994]: DEBUG oslo_vmware.api [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242797, 'name': PowerOnVM_Task, 'duration_secs': 0.405286} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.022946] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1267.023148] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94093a77-a04f-46bf-8a1e-8ad6a674df21 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance 'eea243fb-97fc-4c65-8699-1b3c321bd250' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1267.242316] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ed52ed66-5152-4425-97df-efe976feadd3 tempest-ServersAaction247Test-1942762578 tempest-ServersAaction247Test-1942762578-project-member] Lock "b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.409s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.383812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1267.384022] env[69994]: DEBUG nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Instance network_info: |[{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", 
"ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1267.384619] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:01:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '657bb865-1c59-4abc-b02a-bb91154c3cd9', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1267.392209] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1267.393385] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1267.393385] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd83a897-04d8-41b4-95a4-be67efd0b0d7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.412777] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1267.412777] env[69994]: value = "task-3242798" [ 1267.412777] env[69994]: _type = "Task" [ 1267.412777] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.420637] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242798, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.783800] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Applying migration context for instance eea243fb-97fc-4c65-8699-1b3c321bd250 as it has an incoming, in-progress migration 2bfc809a-fb4d-4b03-a5ba-2be988727a62. Migration status is post-migrating {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1267.784769] env[69994]: INFO nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating resource usage from migration 2bfc809a-fb4d-4b03-a5ba-2be988727a62 [ 1267.799503] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.799650] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 03e58b14-12fe-46e5-b483-4176d5a43c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.799769] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 6b73608e-b62f-4292-870c-51f1c686e569 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.799886] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Migration 2bfc809a-fb4d-4b03-a5ba-2be988727a62 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1267.800000] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance eea243fb-97fc-4c65-8699-1b3c321bd250 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.800128] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 1735049d-a240-48fc-a360-3b00b02225b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.923385] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242798, 'name': CreateVM_Task, 'duration_secs': 0.335375} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.923539] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1267.924396] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.924493] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.924745] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1267.924991] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0035c911-3715-4efd-a446-38691ba5c3b3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.929688] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1267.929688] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52974987-5af3-6dfc-514f-928f96809b2c" [ 1267.929688] env[69994]: _type = "Task" [ 1267.929688] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.937566] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52974987-5af3-6dfc-514f-928f96809b2c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.243148] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.243496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.302362] env[69994]: INFO nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 217bd31d-f705-4aa7-a8a7-d79e407b7c7b has allocations against this compute host but is not found in the database. [ 1268.302599] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1268.302745] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1268.402050] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bc5959-ed66-454f-91ce-e227d62a1d08 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.410601] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691cfab4-412a-454f-ac7f-49a05578abc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.444593] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3eb63a3-2ed0-4690-ad70-cb55aba4e9fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.457269] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9ffd37-8555-4456-ad74-a70c13abbf64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.461190] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52974987-5af3-6dfc-514f-928f96809b2c, 'name': SearchDatastore_Task, 'duration_secs': 0.010857} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.461527] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.461759] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1268.461994] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.462153] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.462602] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1268.462898] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c503b534-3e4f-4c9d-b373-fe6b143601e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.473137] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.482915] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1268.483027] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1268.484676] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a4551e9-8eb1-4a13-9754-3af91c17ad66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.490421] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1268.490421] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52002e40-99c2-acf9-3dda-b767e6a78e89" [ 1268.490421] env[69994]: _type = "Task" [ 1268.490421] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.498926] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52002e40-99c2-acf9-3dda-b767e6a78e89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.745492] env[69994]: DEBUG nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1268.979023] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1269.001085] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52002e40-99c2-acf9-3dda-b767e6a78e89, 'name': SearchDatastore_Task, 'duration_secs': 0.020478} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.002415] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cedd142d-d9b2-48dc-a5cf-23bc36ff2d94 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.008357] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1269.008357] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a8b227-34dc-b6c3-8806-8d715ae119c2" [ 1269.008357] env[69994]: _type = "Task" [ 1269.008357] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.017141] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a8b227-34dc-b6c3-8806-8d715ae119c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.118270] env[69994]: DEBUG nova.network.neutron [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Port a8446ab9-60ee-4fc2-8b75-e53b3b39a38f binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1269.118603] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.118788] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1269.118993] env[69994]: DEBUG nova.network.neutron [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1269.273067] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.483024] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1269.483385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.707s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.483742] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.211s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.485526] env[69994]: INFO nova.compute.claims [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1269.523343] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a8b227-34dc-b6c3-8806-8d715ae119c2, 'name': SearchDatastore_Task, 'duration_secs': 0.013094} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.524691] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.524691] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 1735049d-a240-48fc-a360-3b00b02225b1/1735049d-a240-48fc-a360-3b00b02225b1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1269.524691] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcb16933-a8fb-4720-8027-2d90039db525 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.534138] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1269.534138] env[69994]: value = "task-3242799" [ 1269.534138] env[69994]: _type = "Task" [ 1269.534138] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.545683] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.849873] env[69994]: DEBUG nova.network.neutron [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.044950] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242799, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.351986] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1270.546088] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242799, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.905174} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.546363] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 1735049d-a240-48fc-a360-3b00b02225b1/1735049d-a240-48fc-a360-3b00b02225b1.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1270.546566] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1270.546828] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b391cb94-9813-4b72-a350-6713460a12f9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.556760] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1270.556760] env[69994]: value = "task-3242800" [ 1270.556760] env[69994]: _type = "Task" [ 1270.556760] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.570625] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242800, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.629977] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f1ccad-1405-40aa-bec0-ce5b7eeefef3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.638453] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20a0a98-1f64-406f-8e76-d1a90c7da542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.670668] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f26b66d-9d8c-4288-a297-af9b3d349b9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.679393] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f08e9f-7c87-4383-8fef-752dda8f0b6c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.695120] env[69994]: DEBUG nova.compute.provider_tree [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.856144] env[69994]: DEBUG nova.compute.manager [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69994) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1270.856376] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.070617] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072425} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.070889] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1271.071716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495a3b54-56d6-4cf7-a1de-f04cab7b37dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.096294] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 1735049d-a240-48fc-a360-3b00b02225b1/1735049d-a240-48fc-a360-3b00b02225b1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.096602] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dabfc23f-5d24-4033-988a-9e5772584b7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.121581] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1271.121581] env[69994]: value = "task-3242801" [ 1271.121581] env[69994]: _type = "Task" [ 1271.121581] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.130961] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242801, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.199494] env[69994]: DEBUG nova.scheduler.client.report [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.484326] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1271.484734] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1271.634134] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242801, 'name': ReconfigVM_Task, 'duration_secs': 0.345402} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.634460] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 1735049d-a240-48fc-a360-3b00b02225b1/1735049d-a240-48fc-a360-3b00b02225b1.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.635183] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31f46563-ce30-4825-8348-2aea405173bb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.645049] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1271.645049] env[69994]: value = "task-3242802" [ 1271.645049] env[69994]: _type = "Task" [ 1271.645049] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.653167] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242802, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.692812] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.693214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.704582] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.704939] env[69994]: DEBUG nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1271.708601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.852s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1271.990561] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1271.990770] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.154723] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242802, 'name': Rename_Task, 'duration_secs': 0.166338} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.155331] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.155577] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d85039c0-1fa5-4f6c-9ae3-5e6abdb802c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.162826] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1272.162826] env[69994]: value = "task-3242803" [ 1272.162826] env[69994]: _type = "Task" [ 1272.162826] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.170595] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.198315] env[69994]: DEBUG nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1272.210027] env[69994]: DEBUG nova.compute.utils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1272.211349] env[69994]: DEBUG nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1272.211543] env[69994]: DEBUG nova.network.neutron [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1272.214369] env[69994]: DEBUG nova.objects.instance [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'migration_context' on Instance uuid eea243fb-97fc-4c65-8699-1b3c321bd250 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.252672] env[69994]: DEBUG nova.policy [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64b979ffffc94e09bf911bdb89f4796a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccb64f97e46a4e499df974959db53dcd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1272.396949] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.397239] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.530027] env[69994]: DEBUG nova.network.neutron [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Successfully created port: 068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1272.673499] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242803, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.721252] env[69994]: DEBUG nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1272.727713] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.838711] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493556d5-1c62-422d-8782-9fefc07136d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.847331] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49430b9-cd28-4d36-b564-02b298b70269 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.877870] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b2575e-fc67-49d2-823a-e2912b2896a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.886010] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8656928-d99d-41ba-86ee-f417aa5f2363 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.901471] env[69994]: INFO nova.compute.manager [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Detaching volume 7f73fdd9-c95c-42cc-91c9-d54e30fd2995 [ 1272.903473] env[69994]: DEBUG nova.compute.provider_tree [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.940296] env[69994]: INFO nova.virt.block_device [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Attempting to driver detach volume 7f73fdd9-c95c-42cc-91c9-d54e30fd2995 from mountpoint /dev/sdb [ 1272.941242] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1272.941242] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648040', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'name': 'volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '03e58b14-12fe-46e5-b483-4176d5a43c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'serial': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1272.941895] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93bbfbd-26dd-4ce4-8e45-20ab977a5a11 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.965046] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ad4d98-ecaf-4a40-9de8-b1f0ae956eca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.972399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d80324-627d-4de6-894b-9620469252da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.992838] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51568d3b-7b53-41a2-ba29-e23815e10ff1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.007727] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] The volume has not been displaced from its original location: [datastore2] volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995/volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1273.012883] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1273.013148] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37af6a3c-11f2-41be-ae90-fb5b26b34bd6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.030633] env[69994]: DEBUG oslo_vmware.api [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1273.030633] env[69994]: value = "task-3242804" [ 1273.030633] env[69994]: _type = "Task" [ 1273.030633] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.037939] env[69994]: DEBUG oslo_vmware.api [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242804, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.173831] env[69994]: DEBUG oslo_vmware.api [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242803, 'name': PowerOnVM_Task, 'duration_secs': 0.514554} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.174102] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1273.174336] env[69994]: INFO nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Took 8.84 seconds to spawn the instance on the hypervisor. 
[ 1273.174486] env[69994]: DEBUG nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1273.175273] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8ea176-8832-4308-a2b8-0cc99f216899 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.226499] env[69994]: INFO nova.virt.block_device [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Booting with volume c725763f-b1d7-421a-95e2-cd5644ee630e at /dev/sda [ 1273.261246] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aea27309-a0b4-4ffb-93c1-3f82086e621e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.272061] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82105d9-3390-4002-8abf-6d0029d81f2e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.025623] env[69994]: DEBUG nova.scheduler.client.report [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1274.040359] env[69994]: DEBUG nova.compute.manager [req-ccdf72fe-6e03-4a9d-9a6f-06f75e676578 req-f3a61d9b-f3c3-4eb6-bcc6-2f5e7a486cce service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Received event network-vif-plugged-068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1274.040540] env[69994]: DEBUG oslo_concurrency.lockutils [req-ccdf72fe-6e03-4a9d-9a6f-06f75e676578 req-f3a61d9b-f3c3-4eb6-bcc6-2f5e7a486cce service nova] Acquiring lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.040750] env[69994]: DEBUG oslo_concurrency.lockutils [req-ccdf72fe-6e03-4a9d-9a6f-06f75e676578 req-f3a61d9b-f3c3-4eb6-bcc6-2f5e7a486cce service nova] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.040913] env[69994]: DEBUG oslo_concurrency.lockutils [req-ccdf72fe-6e03-4a9d-9a6f-06f75e676578 req-f3a61d9b-f3c3-4eb6-bcc6-2f5e7a486cce service nova] Lock 
"217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.041084] env[69994]: DEBUG nova.compute.manager [req-ccdf72fe-6e03-4a9d-9a6f-06f75e676578 req-f3a61d9b-f3c3-4eb6-bcc6-2f5e7a486cce service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] No waiting events found dispatching network-vif-plugged-068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1274.041242] env[69994]: WARNING nova.compute.manager [req-ccdf72fe-6e03-4a9d-9a6f-06f75e676578 req-f3a61d9b-f3c3-4eb6-bcc6-2f5e7a486cce service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Received unexpected event network-vif-plugged-068d9b2b-b272-416b-8986-4baa4e3c1270 for instance with vm_state building and task_state block_device_mapping. [ 1274.041719] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a7efa2e-5ed7-463a-af67-2dc3471ac525 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.045475] env[69994]: INFO nova.compute.manager [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Took 13.92 seconds to build instance. [ 1274.051588] env[69994]: DEBUG oslo_vmware.api [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242804, 'name': ReconfigVM_Task, 'duration_secs': 0.293943} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.057025] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1274.058801] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-025921fa-f95e-4028-b1bb-ffe1b156f9d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.072387] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74035c8-8c2b-46b6-a895-70907bec418d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.084643] env[69994]: DEBUG nova.network.neutron [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Successfully updated port: 068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1274.093473] env[69994]: DEBUG oslo_vmware.api [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1274.093473] env[69994]: value = "task-3242805" [ 1274.093473] env[69994]: _type = "Task" [ 1274.093473] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.111635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc5de0d-3d46-4335-bdd1-96907c8b0b37 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.114671] env[69994]: DEBUG oslo_vmware.api [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242805, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.121708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91f0cdf-016c-45f9-a9e2-acfc4ecf1105 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.140841] env[69994]: DEBUG nova.virt.block_device [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating existing volume attachment record: ff2cf93c-6d8c-426a-8cbb-86b96ec025e1 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1274.553620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2fbc272b-7bba-4c79-816a-481fc8ac5713 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "1735049d-a240-48fc-a360-3b00b02225b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.436s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.584131] env[69994]: DEBUG nova.compute.manager [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-changed-657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1274.584131] env[69994]: DEBUG nova.compute.manager [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing instance network info cache due to event network-changed-657bb865-1c59-4abc-b02a-bb91154c3cd9. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1274.584131] env[69994]: DEBUG oslo_concurrency.lockutils [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.584824] env[69994]: DEBUG oslo_concurrency.lockutils [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.585140] env[69994]: DEBUG nova.network.neutron [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing network info cache for port 657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1274.586896] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.587190] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1274.587454] env[69994]: DEBUG nova.network.neutron [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1274.607202] env[69994]: DEBUG oslo_vmware.api [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242805, 'name': ReconfigVM_Task, 'duration_secs': 0.185083} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.607202] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648040', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'name': 'volume-7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '03e58b14-12fe-46e5-b483-4176d5a43c0e', 'attached_at': '', 'detached_at': '', 'volume_id': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995', 'serial': '7f73fdd9-c95c-42cc-91c9-d54e30fd2995'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1275.042373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.334s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.048099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.320s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.049477] env[69994]: INFO nova.compute.claims [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1275.120367] env[69994]: DEBUG nova.network.neutron [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1275.150431] env[69994]: DEBUG nova.objects.instance [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'flavor' on Instance uuid 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.265813] env[69994]: DEBUG nova.network.neutron [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [{"id": "068d9b2b-b272-416b-8986-4baa4e3c1270", "address": "fa:16:3e:9c:df:58", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap068d9b2b-b2", "ovs_interfaceid": "068d9b2b-b272-416b-8986-4baa4e3c1270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.307682] env[69994]: DEBUG nova.network.neutron [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updated VIF entry in instance network info cache for port 657bb865-1c59-4abc-b02a-bb91154c3cd9. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1275.308054] env[69994]: DEBUG nova.network.neutron [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.768814] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.769106] env[69994]: DEBUG nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Instance network_info: |[{"id": "068d9b2b-b272-416b-8986-4baa4e3c1270", "address": "fa:16:3e:9c:df:58", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap068d9b2b-b2", "ovs_interfaceid": "068d9b2b-b272-416b-8986-4baa4e3c1270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1275.810221] env[69994]: DEBUG oslo_concurrency.lockutils [req-e832fd52-eb06-4e92-b65d-6836047894c9 req-37e9c416-c771-4532-abcd-613b9bfe91be service nova] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1275.985982] env[69994]: DEBUG nova.compute.manager [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Received event network-changed-068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1275.986228] env[69994]: DEBUG nova.compute.manager [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Refreshing instance network info cache due to event network-changed-068d9b2b-b272-416b-8986-4baa4e3c1270. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1275.986468] env[69994]: DEBUG oslo_concurrency.lockutils [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] Acquiring lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.986614] env[69994]: DEBUG oslo_concurrency.lockutils [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] Acquired lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.986773] env[69994]: DEBUG nova.network.neutron [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Refreshing network info cache for port 068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1276.159900] env[69994]: DEBUG oslo_concurrency.lockutils [None req-86faba0b-a90b-47e0-a646-f1264891d1ab tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.763s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.171148] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37363e7-b7d3-4a6a-b2f3-bc2291efe191 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.180215] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e596f5-e5fa-4488-8856-1e296f34517a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.212907] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d79c692-c376-4dc6-a477-e99a4c48464c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.221315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-df317cea-efe7-434b-86bf-b2750adcb285 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.235053] env[69994]: DEBUG nova.compute.provider_tree [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.244614] env[69994]: DEBUG nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1276.245080] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1276.246050] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.246050] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1276.246050] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.246050] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1276.246050] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1276.246276] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 
tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1276.246276] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1276.246411] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1276.246578] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1276.246802] env[69994]: DEBUG nova.virt.hardware [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1276.248028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588364dc-a465-4649-908d-7327f49e3b61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.256993] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c425bb4a-b35f-4819-a7af-18975e964b58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.271532] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:df:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '068d9b2b-b272-416b-8986-4baa4e3c1270', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1276.282926] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1276.283231] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1276.283540] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebea22f2-14f0-42fe-b421-85c1c2836722 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.315195] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1276.315195] env[69994]: value = "task-3242806" [ 1276.315195] env[69994]: _type = "Task" [ 1276.315195] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.326538] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242806, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.584865] env[69994]: INFO nova.compute.manager [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Swapping old allocation on dict_keys(['92ce3c95-4efe-4d04-802b-6b187afc5aa7']) held by migration 2bfc809a-fb4d-4b03-a5ba-2be988727a62 for instance [ 1276.610189] env[69994]: DEBUG nova.scheduler.client.report [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Overwriting current allocation {'allocations': {'92ce3c95-4efe-4d04-802b-6b187afc5aa7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 164}}, 'project_id': '38d5a89ed7c248c3be506ef12caf5f1e', 'user_id': '08a2b92b6c0141a6a7e301e064032289', 'consumer_generation': 1} on consumer eea243fb-97fc-4c65-8699-1b3c321bd250 {{(pid=69994) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1276.654780] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.655109] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.655358] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.655548] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.655734] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.657991] env[69994]: INFO nova.compute.manager [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Terminating instance [ 1276.701126] env[69994]: DEBUG nova.network.neutron [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updated VIF entry in instance network info cache for port 068d9b2b-b272-416b-8986-4baa4e3c1270. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1276.701126] env[69994]: DEBUG nova.network.neutron [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [{"id": "068d9b2b-b272-416b-8986-4baa4e3c1270", "address": "fa:16:3e:9c:df:58", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap068d9b2b-b2", "ovs_interfaceid": "068d9b2b-b272-416b-8986-4baa4e3c1270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.719097] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.719288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.719458] env[69994]: DEBUG nova.network.neutron [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1276.738712] env[69994]: DEBUG nova.scheduler.client.report [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1276.825926] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242806, 'name': CreateVM_Task, 'duration_secs': 0.386573} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.826099] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1276.826779] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'ff2cf93c-6d8c-426a-8cbb-86b96ec025e1', 'disk_bus': None, 'guest_format': None, 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648050', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'name': 'volume-c725763f-b1d7-421a-95e2-cd5644ee630e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '217bd31d-f705-4aa7-a8a7-d79e407b7c7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'serial': 'c725763f-b1d7-421a-95e2-cd5644ee630e'}, 'mount_device': '/dev/sda', 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=69994) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1276.826991] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Root volume attach. 
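The inventory reported for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 above feeds Placement's standard capacity formula, capacity = (total - reserved) * allocation_ratio. A quick worked check against those numbers:

# Inventory exactly as reported in the log entry above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400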
Driver type: vmdk {{(pid=69994) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1276.827761] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4462d3-06f3-490b-ad61-c0f6b519227f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.835080] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c7fa48-dfd6-4336-8fa6-1a8fed14e4a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.840832] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddff29a5-12d6-43d8-9a6f-508de7ddf189 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.847203] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-028b0976-cd08-42b3-a260-31423e899379 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.854171] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1276.854171] env[69994]: value = "task-3242807" [ 1276.854171] env[69994]: _type = "Task" [ 1276.854171] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.861170] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242807, 'name': RelocateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.163367] env[69994]: DEBUG nova.compute.manager [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Start destroying the instance on the hypervisor. 
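Every vSphere call in this log that ends in _Task (CreateVM_Task, RelocateVM_Task, PowerOffVM_Task, ReconfigVM_Task, ...) follows the same invoke-then-poll pattern: the call returns a task reference immediately, and wait_for_task/_poll_task produce the repeated "progress is N%" lines until the task reaches success or error. A self-contained sketch of that loop with a stand-in task object; real code goes through oslo.vmware's VMwareAPISession rather than this fake.

import time

class FakeTask:
    """Pretend vSphere task that finishes after a couple of polls."""
    def __init__(self, name):
        self.name, self.progress, self.state = name, 0, 'running'

    def poll(self):
        self.progress = min(self.progress + 50, 100)
        if self.progress == 100:
            self.state = 'success'
        return self.progress, self.state

def wait_for_task(task, poll_interval=0.1):
    # Equivalent to the repeated "Task: {...} progress is N%." entries:
    # keep polling until the task succeeds, raising if it errors out.
    while True:
        progress, state = task.poll()
        print(f"Task {task.name} progress is {progress}%.")
        if state == 'success':
            return state
        if state == 'error':
            raise RuntimeError(f"task {task.name} failed")
        time.sleep(poll_interval)

wait_for_task(FakeTask('task-3242807'))   # e.g. the RelocateVM_Task above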
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1277.163755] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1277.164575] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b6ef54-74ac-4702-b007-56a3dca46a69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.172231] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1277.172469] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7efd32d6-88cc-4965-9912-ec5861b9a575 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.180311] env[69994]: DEBUG oslo_vmware.api [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1277.180311] env[69994]: value = "task-3242808" [ 1277.180311] env[69994]: _type = "Task" [ 1277.180311] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.189213] env[69994]: DEBUG oslo_vmware.api [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242808, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.202911] env[69994]: DEBUG oslo_concurrency.lockutils [req-953406e5-c983-48b2-aa16-24010eeb1b71 req-7c513f1e-1998-4e85-a409-e09f70ca258d service nova] Releasing lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1277.242846] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.243217] env[69994]: DEBUG nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Start building networks asynchronously for instance. 
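The "Acquiring lock ... / acquired ... waited / released ... held" triples throughout this log come from oslo.concurrency's lockutils, which Nova uses to serialise work on a single instance: the per-UUID and per-UUID "-events" locks during terminate, the refresh_cache-* locks around network-info updates, and the shared compute_resources lock in the resource tracker. A hedged sketch of the pattern; the lock names mirror the ones above, but the function itself is illustrative.

from oslo_concurrency import lockutils

def do_terminate_instance(instance_uuid):
    # Same shape as the terminate path above: take the per-instance lock,
    # then the per-instance "-events" lock while pending events are cleared.
    with lockutils.lock(instance_uuid):
        with lockutils.lock(f"{instance_uuid}-events"):
            print(f"clearing events for {instance_uuid}")
        print(f"terminating {instance_uuid}")

do_terminate_instance("03e58b14-12fe-46e5-b483-4176d5a43c0e")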
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1277.364266] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242807, 'name': RelocateVM_Task, 'duration_secs': 0.374357} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.364628] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Volume attach. Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1277.364919] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648050', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'name': 'volume-c725763f-b1d7-421a-95e2-cd5644ee630e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '217bd31d-f705-4aa7-a8a7-d79e407b7c7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'serial': 'c725763f-b1d7-421a-95e2-cd5644ee630e'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1277.365777] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815ef3d8-9dd3-4006-8bcd-4b2f8fbe4703 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.382475] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae14ebd-2649-4487-90de-f1dcdc91c143 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.404924] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] volume-c725763f-b1d7-421a-95e2-cd5644ee630e/volume-c725763f-b1d7-421a-95e2-cd5644ee630e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1277.405237] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9c6da29-4026-4b65-8173-b3129f8b469d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.428538] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1277.428538] env[69994]: value = "task-3242809" [ 1277.428538] env[69994]: _type = "Task" [ 1277.428538] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.438465] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242809, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.692206] env[69994]: DEBUG oslo_vmware.api [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242808, 'name': PowerOffVM_Task, 'duration_secs': 0.30524} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.694810] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1277.695087] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1277.695385] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53bb30af-c652-4757-8d50-64a96cb97309 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.741116] env[69994]: DEBUG nova.network.neutron [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [{"id": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "address": "fa:16:3e:3d:0c:3b", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8446ab9-60", "ovs_interfaceid": "a8446ab9-60ee-4fc2-8b75-e53b3b39a38f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.748408] env[69994]: DEBUG 
nova.compute.utils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1277.749769] env[69994]: DEBUG nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1277.749930] env[69994]: DEBUG nova.network.neutron [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1277.764639] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1277.764871] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1277.765067] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleting the datastore file [datastore1] 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1277.765372] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a069041-c096-4c08-a4c4-0c6c5ac9cdaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.773423] env[69994]: DEBUG oslo_vmware.api [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1277.773423] env[69994]: value = "task-3242811" [ 1277.773423] env[69994]: _type = "Task" [ 1277.773423] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.781634] env[69994]: DEBUG oslo_vmware.api [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242811, 'name': DeleteDatastoreFile_Task} progress is 0%. 
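The "Using /dev/sd instead of None" line comes from compute.utils.get_next_device_name, which picks a device name for a new block device when the request did not specify one. A simplified stand-in for that selection (illustrative only; Nova's real logic also normalises prefixes and skips reserved names):

import string

def next_device_name(used_names, prefix='/dev/sd'):
    # Return the first unused letter on the given prefix, e.g. /dev/sdb
    # when /dev/sda is already taken by the root disk.
    used = {n[len(prefix):] for n in used_names if n.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used:
            return prefix + letter
    raise ValueError("no free device names on prefix " + prefix)

print(next_device_name(['/dev/sda']))   # -> /dev/sdb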
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.821016] env[69994]: DEBUG nova.policy [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '432370da6f1840db8f93b613ca52e31d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42ee300d6f33459da1deb82b1b14cf74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1277.942303] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242809, 'name': ReconfigVM_Task, 'duration_secs': 0.509656} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.942303] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfigured VM instance instance-00000070 to attach disk [datastore1] volume-c725763f-b1d7-421a-95e2-cd5644ee630e/volume-c725763f-b1d7-421a-95e2-cd5644ee630e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1277.946500] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df6af3ad-c910-4888-bee9-f12f45de13f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.962536] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1277.962536] env[69994]: value = "task-3242812" [ 1277.962536] env[69994]: _type = "Task" [ 1277.962536] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.970553] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242812, 'name': ReconfigVM_Task} progress is 5%. 
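The "Policy check for network:attach_external_network failed" line is Nova's oslo.policy enforcement rejecting a member/reader token for an admin-only rule before the port is created. A rough sketch of that check with oslo.policy; the rule string and its default here are assumptions for illustration, not Nova's shipped policy file.

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '42ee300d6f33459da1deb82b1b14cf74',
         'user_id': '432370da6f1840db8f93b613ca52e31d'}

# Returns False for this non-admin context, matching the DEBUG line above.
print(enforcer.enforce('network:attach_external_network', {}, creds))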
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.106018] env[69994]: DEBUG nova.network.neutron [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Successfully created port: 895c33b4-795e-44d8-b1c8-fa7abde175c8 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1278.244373] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-eea243fb-97fc-4c65-8699-1b3c321bd250" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1278.245012] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1278.245334] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-122ecf8c-bde4-44e7-a86e-40a5873556c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.254145] env[69994]: DEBUG nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1278.258279] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1278.258279] env[69994]: value = "task-3242813" [ 1278.258279] env[69994]: _type = "Task" [ 1278.258279] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.268875] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.284198] env[69994]: DEBUG oslo_vmware.api [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154042} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.286305] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1278.286305] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1278.286305] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1278.286305] env[69994]: INFO nova.compute.manager [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1278.286305] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1278.286305] env[69994]: DEBUG nova.compute.manager [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1278.286305] env[69994]: DEBUG nova.network.neutron [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1278.472784] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242812, 'name': ReconfigVM_Task, 'duration_secs': 0.363727} completed successfully. 
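The "Waiting for function ... _deallocate_network_with_retries to return" entry is the oslo.service looping-call wrapper Nova uses to retry network deallocation: the wrapped function runs on an interval until it signals completion. A small sketch of that retry shape; the retry count and interval are arbitrary, and Nova's real wrapper layers extra error handling on top.

from oslo_service import loopingcall

attempts = {'count': 0}

def _deallocate_with_retries():
    attempts['count'] += 1
    if attempts['count'] < 3:
        return  # not done yet; the looping call runs us again after the interval
    raise loopingcall.LoopingCallDone(retvalue='network deallocated')

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()
print(result, 'after', attempts['count'], 'attempts')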
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.473037] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648050', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'name': 'volume-c725763f-b1d7-421a-95e2-cd5644ee630e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '217bd31d-f705-4aa7-a8a7-d79e407b7c7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'serial': 'c725763f-b1d7-421a-95e2-cd5644ee630e'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1278.473587] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b419885a-536c-4883-ad61-3185005402c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.481134] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1278.481134] env[69994]: value = "task-3242814" [ 1278.481134] env[69994]: _type = "Task" [ 1278.481134] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.489227] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242814, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.737041] env[69994]: DEBUG nova.compute.manager [req-246cfac0-9b8a-4ac3-9bfe-52cc3d13d98f req-37534777-616a-4efc-aa95-3f490652b60d service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Received event network-vif-deleted-fc354355-eb77-47cd-9f5b-89c8e6616b1d {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1278.737041] env[69994]: INFO nova.compute.manager [req-246cfac0-9b8a-4ac3-9bfe-52cc3d13d98f req-37534777-616a-4efc-aa95-3f490652b60d service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Neutron deleted interface fc354355-eb77-47cd-9f5b-89c8e6616b1d; detaching it from the instance and deleting it from the info cache [ 1278.737041] env[69994]: DEBUG nova.network.neutron [req-246cfac0-9b8a-4ac3-9bfe-52cc3d13d98f req-37534777-616a-4efc-aa95-3f490652b60d service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.772303] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242813, 'name': PowerOffVM_Task, 'duration_secs': 0.401843} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.772615] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1278.774028] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1278.774251] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.774405] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1278.774583] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.774725] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1278.774873] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1278.775170] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1278.775361] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 
tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1278.775537] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1278.775714] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1278.775892] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1278.783183] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a64b782e-e33d-40d2-b691-9b4b6abe9097 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.801990] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1278.801990] env[69994]: value = "task-3242815" [ 1278.801990] env[69994]: _type = "Task" [ 1278.801990] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.811285] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242815, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.992322] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242814, 'name': Rename_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.215936] env[69994]: DEBUG nova.network.neutron [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.238643] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d7aea94-3fd2-4ce0-8a08-e24a0d2f7896 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.249310] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3474ca1a-3918-4a69-8a98-d86ff3fb3ea4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.267946] env[69994]: DEBUG nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1279.278763] env[69994]: DEBUG nova.compute.manager [req-246cfac0-9b8a-4ac3-9bfe-52cc3d13d98f req-37534777-616a-4efc-aa95-3f490652b60d service nova] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Detach interface failed, port_id=fc354355-eb77-47cd-9f5b-89c8e6616b1d, reason: Instance 03e58b14-12fe-46e5-b483-4176d5a43c0e could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1279.298062] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1279.298307] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.298473] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1279.298722] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor pref 0:0:0 
{{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.298942] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1279.299164] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1279.299388] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1279.299550] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1279.299713] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1279.299875] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1279.300058] env[69994]: DEBUG nova.virt.hardware [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1279.300911] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786a0c0a-cd72-42ff-81fa-fd18e063af82 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.315442] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2767d982-4ea2-482e-927d-957fe279bf4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.320038] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242815, 'name': ReconfigVM_Task, 'duration_secs': 0.159912} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.320884] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21cc0f0-0fab-48a6-a0d6-12178f5b6eae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.348447] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1279.348709] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.348866] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1279.349079] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.349243] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1279.349407] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1279.349586] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1279.349745] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1279.349903] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1279.350072] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1279.350246] env[69994]: DEBUG nova.virt.hardware [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1279.351030] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-373224e3-fc4a-4d35-83e6-d9a1edfc0b8a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.357605] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1279.357605] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a280a7-588c-6be8-31b2-22c007f4e4e6" [ 1279.357605] env[69994]: _type = "Task" [ 1279.357605] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.368012] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a280a7-588c-6be8-31b2-22c007f4e4e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.471903] env[69994]: DEBUG nova.compute.manager [req-e53279ba-77a2-4e86-b329-6e429717c5d7 req-f79e7409-c017-4f5b-a33f-ace799da00e7 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Received event network-vif-plugged-895c33b4-795e-44d8-b1c8-fa7abde175c8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1279.472179] env[69994]: DEBUG oslo_concurrency.lockutils [req-e53279ba-77a2-4e86-b329-6e429717c5d7 req-f79e7409-c017-4f5b-a33f-ace799da00e7 service nova] Acquiring lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.472412] env[69994]: DEBUG oslo_concurrency.lockutils [req-e53279ba-77a2-4e86-b329-6e429717c5d7 req-f79e7409-c017-4f5b-a33f-ace799da00e7 service nova] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.472563] env[69994]: DEBUG oslo_concurrency.lockutils [req-e53279ba-77a2-4e86-b329-6e429717c5d7 req-f79e7409-c017-4f5b-a33f-ace799da00e7 service nova] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.472733] env[69994]: DEBUG nova.compute.manager [req-e53279ba-77a2-4e86-b329-6e429717c5d7 req-f79e7409-c017-4f5b-a33f-ace799da00e7 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] No waiting events found dispatching network-vif-plugged-895c33b4-795e-44d8-b1c8-fa7abde175c8 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1279.472888] env[69994]: WARNING nova.compute.manager [req-e53279ba-77a2-4e86-b329-6e429717c5d7 req-f79e7409-c017-4f5b-a33f-ace799da00e7 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Received unexpected event network-vif-plugged-895c33b4-795e-44d8-b1c8-fa7abde175c8 for instance with vm_state building and task_state spawning. [ 1279.493307] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242814, 'name': Rename_Task, 'duration_secs': 0.959837} completed successfully. 
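The network-vif-plugged handling above ("No waiting events found dispatching ..." followed by the WARNING about an unexpected event) reflects Nova's external-event handshake: an incoming event only completes a waiter that was registered beforehand; otherwise it is logged and dropped. An illustrative, non-Nova sketch of that handshake:

import threading

_waiters = {}          # (instance_uuid, event_name) -> threading.Event
_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    # Register interest before triggering the action that emits the event.
    ev = threading.Event()
    with _lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev

def dispatch_event(instance_uuid, event_name):
    # Called when the external service (Neutron here) reports the event.
    with _lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
    else:
        ev.set()

# The "unexpected event" branch from the log: nothing was registered yet.
dispatch_event('73288b0c-7e85-48cd-9ea1-d08a31a81c32',
               'network-vif-plugged-895c33b4-795e-44d8-b1c8-fa7abde175c8')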
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.493732] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1279.494026] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2d5fd5c-ad63-45cc-b3c0-ba75a8795048 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.501209] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1279.501209] env[69994]: value = "task-3242816" [ 1279.501209] env[69994]: _type = "Task" [ 1279.501209] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.510572] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242816, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.563224] env[69994]: DEBUG nova.network.neutron [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Successfully updated port: 895c33b4-795e-44d8-b1c8-fa7abde175c8 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.720069] env[69994]: INFO nova.compute.manager [-] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Took 1.43 seconds to deallocate network for instance. [ 1279.868832] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a280a7-588c-6be8-31b2-22c007f4e4e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008141} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.874239] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1279.874544] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f111c53-166c-4528-9e54-5723df5c7471 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.896974] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1279.896974] env[69994]: value = "task-3242817" [ 1279.896974] env[69994]: _type = "Task" [ 1279.896974] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.905506] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242817, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.015253] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242816, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.066586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "refresh_cache-73288b0c-7e85-48cd-9ea1-d08a31a81c32" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.066694] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "refresh_cache-73288b0c-7e85-48cd-9ea1-d08a31a81c32" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.066897] env[69994]: DEBUG nova.network.neutron [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1280.226692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.226978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.227253] env[69994]: DEBUG nova.objects.instance [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'resources' on Instance uuid 03e58b14-12fe-46e5-b483-4176d5a43c0e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.408337] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242817, 'name': ReconfigVM_Task, 'duration_secs': 0.196179} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.408689] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1280.409499] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add91191-c7c7-4e15-9302-3b9d6f627836 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.434132] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1280.434463] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4249c90-fb20-4380-8713-42921af6a231 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.453837] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1280.453837] env[69994]: value = "task-3242818" [ 1280.453837] env[69994]: _type = "Task" [ 1280.453837] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.462042] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242818, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.515506] env[69994]: DEBUG oslo_vmware.api [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242816, 'name': PowerOnVM_Task, 'duration_secs': 0.545261} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.515765] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1280.515960] env[69994]: INFO nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Took 4.27 seconds to spawn the instance on the hypervisor. 
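The PowerOnVM_Task entries above show the usual vCenter task lifecycle: Nova submits the task, then oslo.vmware's wait_for_task polls it, logging "progress is N%" until the task reports success and the "completed successfully" line (with duration_secs) is written. Below is a minimal, stdlib-only sketch of that polling pattern; it is not oslo.vmware's actual implementation, and poll_fn / TaskFailed are hypothetical stand-ins for the real PropertyCollector-based task lookup.

import time

class TaskFailed(Exception):
    """Raised when the simulated task ends in the 'error' state."""
    pass

def wait_for_task(poll_fn, poll_interval=0.5):
    """Poll poll_fn() until the task reports success or error,
    mirroring the 'progress is N%' ... 'completed successfully'
    sequence seen in the log."""
    start = time.monotonic()
    while True:
        info = poll_fn()
        if info["state"] == "success":
            duration = time.monotonic() - start
            print("completed successfully in %.3fs" % duration)
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)

# Usage with a fake task that finishes on the third poll:
_states = iter([{"state": "running", "progress": 0},
                {"state": "running", "progress": 100},
                {"state": "success", "result": "vm-123"}])
print(wait_for_task(lambda: next(_states), poll_interval=0.01))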
[ 1280.516148] env[69994]: DEBUG nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1280.516928] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458e893f-290b-4ea3-9c2a-2ebd78bbbb50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.600729] env[69994]: DEBUG nova.network.neutron [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1280.743030] env[69994]: DEBUG nova.network.neutron [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Updating instance_info_cache with network_info: [{"id": "895c33b4-795e-44d8-b1c8-fa7abde175c8", "address": "fa:16:3e:c8:eb:be", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap895c33b4-79", "ovs_interfaceid": "895c33b4-795e-44d8-b1c8-fa7abde175c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.831849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1188cb57-ff8c-41d4-a0c0-a3ad192dcab8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.839920] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2614b079-c0e3-4df8-b272-6ec00610ca0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.871053] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc326a9-c6a1-4713-b9be-344a0a4aba0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.877946] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c593d8-6534-4e3d-9ae1-6a8369530c4d {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.891948] env[69994]: DEBUG nova.compute.provider_tree [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.964320] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242818, 'name': ReconfigVM_Task, 'duration_secs': 0.286253} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.964623] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Reconfigured VM instance instance-0000006c to attach disk [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250/eea243fb-97fc-4c65-8699-1b3c321bd250.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1280.965744] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dc77ae-65df-46ca-9246-7bca437bd245 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.985250] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1de695-d916-4f8a-9886-adb03687a44e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.003883] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fa4edc-bf22-45e0-972a-8c268a558567 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.022083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c029fbfd-4956-4366-a8c9-47a887e29067 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.034053] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1281.035482] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-559a8760-e357-44b6-bd8d-f2f75e1c6ab6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.037311] env[69994]: INFO nova.compute.manager [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Took 11.79 seconds to build instance. 
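Throughout the trace, oslo.concurrency's lockutils records how long each caller waited for a named lock and how long it held it (for example the "compute_resources" lock taken by the resource tracker). The snippet below is a stdlib-only sketch of that bookkeeping pattern, assuming a simple in-process threading.Lock; logged_lock is a hypothetical helper for illustration, not oslo.concurrency's API.

import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def logged_lock(name, caller):
    """Acquire a named lock and report wait/hold times, in the spirit of
    the 'Acquiring lock ... / acquired ... waited / released ... held'
    lines emitted by lockutils."""
    lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, caller))
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print('Lock "%s" released by "%s" :: held %.3fs' % (name, caller, held))

# Usage, mirroring the resource-tracker style of the log:
with logged_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.05)  # critical section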
[ 1281.043582] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1281.043582] env[69994]: value = "task-3242819" [ 1281.043582] env[69994]: _type = "Task" [ 1281.043582] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.052295] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242819, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.246688] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "refresh_cache-73288b0c-7e85-48cd-9ea1-d08a31a81c32" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.247047] env[69994]: DEBUG nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Instance network_info: |[{"id": "895c33b4-795e-44d8-b1c8-fa7abde175c8", "address": "fa:16:3e:c8:eb:be", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap895c33b4-79", "ovs_interfaceid": "895c33b4-795e-44d8-b1c8-fa7abde175c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1281.247529] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:eb:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089ef678-58b4-4bf0-a39d-b94b2d364291', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '895c33b4-795e-44d8-b1c8-fa7abde175c8', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1281.256186] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating 
folder: Project (42ee300d6f33459da1deb82b1b14cf74). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.256186] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-386681f9-93c5-4e39-9810-ef7c068eef79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.271617] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created folder: Project (42ee300d6f33459da1deb82b1b14cf74) in parent group-v647729. [ 1281.271994] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating folder: Instances. Parent ref: group-v648053. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.272410] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8154d535-cbc0-401e-a606-05014d28ccd0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.282602] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created folder: Instances in parent group-v648053. [ 1281.282858] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1281.283081] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1281.283316] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d4c674d-fff5-4048-ae66-25c1cbbe1921 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.302802] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1281.302802] env[69994]: value = "task-3242822" [ 1281.302802] env[69994]: _type = "Task" [ 1281.302802] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.310725] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242822, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.395907] env[69994]: DEBUG nova.scheduler.client.report [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1281.504510] env[69994]: DEBUG nova.compute.manager [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Received event network-changed-895c33b4-795e-44d8-b1c8-fa7abde175c8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1281.504750] env[69994]: DEBUG nova.compute.manager [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Refreshing instance network info cache due to event network-changed-895c33b4-795e-44d8-b1c8-fa7abde175c8. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1281.504970] env[69994]: DEBUG oslo_concurrency.lockutils [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] Acquiring lock "refresh_cache-73288b0c-7e85-48cd-9ea1-d08a31a81c32" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.505146] env[69994]: DEBUG oslo_concurrency.lockutils [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] Acquired lock "refresh_cache-73288b0c-7e85-48cd-9ea1-d08a31a81c32" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.505328] env[69994]: DEBUG nova.network.neutron [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Refreshing network info cache for port 895c33b4-795e-44d8-b1c8-fa7abde175c8 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1281.539714] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e8532dcc-4486-4e1b-a75d-92ae114b8878 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.296s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.555202] env[69994]: DEBUG oslo_vmware.api [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242819, 'name': PowerOnVM_Task, 'duration_secs': 0.394106} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.555472] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1281.815048] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242822, 'name': CreateVM_Task} progress is 25%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.901678] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.927250] env[69994]: INFO nova.scheduler.client.report [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted allocations for instance 03e58b14-12fe-46e5-b483-4176d5a43c0e [ 1282.229661] env[69994]: DEBUG nova.network.neutron [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Updated VIF entry in instance network info cache for port 895c33b4-795e-44d8-b1c8-fa7abde175c8. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1282.230070] env[69994]: DEBUG nova.network.neutron [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Updating instance_info_cache with network_info: [{"id": "895c33b4-795e-44d8-b1c8-fa7abde175c8", "address": "fa:16:3e:c8:eb:be", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap895c33b4-79", "ovs_interfaceid": "895c33b4-795e-44d8-b1c8-fa7abde175c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.314220] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242822, 'name': CreateVM_Task, 'duration_secs': 0.668765} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.314395] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1282.315073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.315247] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.315621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1282.315835] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03970166-e21d-4a4f-96f7-316e9ea36007 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.320428] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1282.320428] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528d094b-75cb-9f6c-2da9-814ab78047cb" [ 1282.320428] env[69994]: _type = "Task" [ 1282.320428] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.328939] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528d094b-75cb-9f6c-2da9-814ab78047cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.435297] env[69994]: DEBUG oslo_concurrency.lockutils [None req-10d68257-ba64-4ee5-af65-4d0182b5597c tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "03e58b14-12fe-46e5-b483-4176d5a43c0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.780s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.566058] env[69994]: INFO nova.compute.manager [None req-1d74a34a-3c78-48b3-88f9-b13a0aa43d41 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance to original state: 'active' [ 1282.733058] env[69994]: DEBUG oslo_concurrency.lockutils [req-55c0af3a-3c0b-421e-8b46-1bd94b1cede8 req-8e006838-8179-452b-ab12-ef325c93c173 service nova] Releasing lock "refresh_cache-73288b0c-7e85-48cd-9ea1-d08a31a81c32" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.831739] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528d094b-75cb-9f6c-2da9-814ab78047cb, 'name': SearchDatastore_Task, 'duration_secs': 0.01121} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.832052] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.832286] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1282.832519] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.832665] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.832841] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1282.833111] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-276d2cd1-63b2-4bbc-a215-a52e4158f4d5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.841746] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1282.841915] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1282.842630] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74e96291-5588-4892-98a4-c4d02d7217bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.847657] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1282.847657] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a80120-29d6-f46d-118e-e0ed6c8a0138" [ 1282.847657] env[69994]: _type = "Task" [ 1282.847657] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.855297] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a80120-29d6-f46d-118e-e0ed6c8a0138, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.359700] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52a80120-29d6-f46d-118e-e0ed6c8a0138, 'name': SearchDatastore_Task, 'duration_secs': 0.008627} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.360515] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24d20897-ee61-45ee-be22-004965cb0969 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.366204] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1283.366204] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d9c929-539e-8d61-3dac-7565c2df4d55" [ 1283.366204] env[69994]: _type = "Task" [ 1283.366204] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.375492] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d9c929-539e-8d61-3dac-7565c2df4d55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.411191] env[69994]: DEBUG nova.compute.manager [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Stashing vm_state: active {{(pid=69994) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1283.535619] env[69994]: DEBUG nova.compute.manager [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Received event network-changed-75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1283.535760] env[69994]: DEBUG nova.compute.manager [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Refreshing instance network info cache due to event network-changed-75af8d87-ecba-45ba-867a-8c8e9c0389c9. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1283.535984] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] Acquiring lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.536199] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] Acquired lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.536312] env[69994]: DEBUG nova.network.neutron [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Refreshing network info cache for port 75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1283.876488] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d9c929-539e-8d61-3dac-7565c2df4d55, 'name': SearchDatastore_Task, 'duration_secs': 0.016729} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.876844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.877011] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 73288b0c-7e85-48cd-9ea1-d08a31a81c32/73288b0c-7e85-48cd-9ea1-d08a31a81c32.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1283.877268] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dd00aaa-d711-46a6-ae77-176878a44bad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.885152] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1283.885152] env[69994]: value = "task-3242823" [ 1283.885152] env[69994]: _type = "Task" [ 1283.885152] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.893366] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.931086] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.931375] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.311551] env[69994]: DEBUG nova.network.neutron [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updated VIF entry in instance network info cache for port 75af8d87-ecba-45ba-867a-8c8e9c0389c9. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1284.312092] env[69994]: DEBUG nova.network.neutron [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updating instance_info_cache with network_info: [{"id": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "address": "fa:16:3e:5c:3b:72", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75af8d87-ec", "ovs_interfaceid": "75af8d87-ecba-45ba-867a-8c8e9c0389c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.330840] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eea243fb-97fc-4c65-8699-1b3c321bd250" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.331154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.331392] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.331620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.332616] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a 
tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.334645] env[69994]: INFO nova.compute.manager [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Terminating instance [ 1284.394853] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.436209] env[69994]: INFO nova.compute.claims [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1284.814610] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] Releasing lock "refresh_cache-ef37ce64-2c26-4080-899a-6d9dbb5850c9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.814855] env[69994]: DEBUG nova.compute.manager [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Received event network-changed-068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1284.815043] env[69994]: DEBUG nova.compute.manager [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Refreshing instance network info cache due to event network-changed-068d9b2b-b272-416b-8986-4baa4e3c1270. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1284.815273] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] Acquiring lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.815414] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] Acquired lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.815575] env[69994]: DEBUG nova.network.neutron [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Refreshing network info cache for port 068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1284.838246] env[69994]: DEBUG nova.compute.manager [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1284.838463] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1284.839386] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f3cfe1-a699-4871-8252-a9c2b8e801ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.849272] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1284.849565] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bdaaff1-5940-482f-8add-1945923b006c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.857032] env[69994]: DEBUG oslo_vmware.api [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1284.857032] env[69994]: value = "task-3242824" [ 1284.857032] env[69994]: _type = "Task" [ 1284.857032] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.865851] env[69994]: DEBUG oslo_vmware.api [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242824, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.895443] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.979318} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.895806] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 73288b0c-7e85-48cd-9ea1-d08a31a81c32/73288b0c-7e85-48cd-9ea1-d08a31a81c32.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1284.895877] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1284.896143] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c85c73fa-59a0-4490-8a30-6a7c6e3bdcae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.905106] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1284.905106] env[69994]: value = "task-3242825" [ 1284.905106] env[69994]: _type = "Task" [ 1284.905106] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.913173] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242825, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.943231] env[69994]: INFO nova.compute.resource_tracker [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating resource usage from migration 23739038-3397-4e3b-8057-e4f144cc36fe [ 1285.062178] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bd5b41-2d78-4cfa-aa59-e2a79f3dae06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.070319] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28b1c0a-c94f-4b1b-8371-f2c87cea3904 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.103894] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.104164] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.106682] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f8923b-9c7a-422f-a48c-41e32861a2c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.114607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330a8a4b-ecc7-471f-afca-76cc868ad322 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.129542] env[69994]: DEBUG nova.compute.provider_tree [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.368465] env[69994]: DEBUG oslo_vmware.api [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242824, 'name': PowerOffVM_Task, 'duration_secs': 0.261995} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.368718] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1285.368887] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1285.369167] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f265af7-e7d5-4079-896a-64d16a3a4891 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.414808] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07761} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.415083] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1285.415851] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8e6735-e892-44f3-bb2c-1d4182699ffb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.440235] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 73288b0c-7e85-48cd-9ea1-d08a31a81c32/73288b0c-7e85-48cd-9ea1-d08a31a81c32.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.443793] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f59ec463-633c-47e4-b54d-a9ab65044afa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.458064] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1285.458271] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Deleting contents of the VM from datastore datastore2 {{(pid=69994) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1285.458447] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleting the datastore file [datastore2] eea243fb-97fc-4c65-8699-1b3c321bd250 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1285.459033] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba48f31b-f2e2-4004-807a-a6cb562d3e66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.467576] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1285.467576] env[69994]: value = "task-3242828" [ 1285.467576] env[69994]: _type = "Task" [ 1285.467576] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.467821] env[69994]: DEBUG oslo_vmware.api [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1285.467821] env[69994]: value = "task-3242827" [ 1285.467821] env[69994]: _type = "Task" [ 1285.467821] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.478632] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242828, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.481438] env[69994]: DEBUG oslo_vmware.api [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.596180] env[69994]: DEBUG nova.network.neutron [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updated VIF entry in instance network info cache for port 068d9b2b-b272-416b-8986-4baa4e3c1270. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1285.596563] env[69994]: DEBUG nova.network.neutron [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [{"id": "068d9b2b-b272-416b-8986-4baa4e3c1270", "address": "fa:16:3e:9c:df:58", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap068d9b2b-b2", "ovs_interfaceid": "068d9b2b-b272-416b-8986-4baa4e3c1270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.606524] env[69994]: DEBUG nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1285.632479] env[69994]: DEBUG nova.scheduler.client.report [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1285.981629] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242828, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.984647] env[69994]: DEBUG oslo_vmware.api [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27747} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.984877] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1285.985071] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1285.985246] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1285.985419] env[69994]: INFO nova.compute.manager [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1285.985652] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1285.985836] env[69994]: DEBUG nova.compute.manager [-] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1285.985931] env[69994]: DEBUG nova.network.neutron [-] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1286.100035] env[69994]: DEBUG oslo_concurrency.lockutils [req-cfa7e60e-e6f7-4cd3-a6e5-60515fffe4e7 req-5f0cdd88-b6f0-457d-9bc9-fc9eea9d1371 service nova] Releasing lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.128323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.141685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.210s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.141907] env[69994]: INFO nova.compute.manager [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Migrating [ 1286.148375] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.020s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.149900] env[69994]: INFO nova.compute.claims [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1286.425335] env[69994]: DEBUG nova.compute.manager [req-fc5ee561-1148-4565-8a98-ba53a6b451d3 req-d82fc655-3285-4f7a-87ed-933f34064400 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Received event network-vif-deleted-a8446ab9-60ee-4fc2-8b75-e53b3b39a38f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1286.425583] env[69994]: INFO nova.compute.manager [req-fc5ee561-1148-4565-8a98-ba53a6b451d3 req-d82fc655-3285-4f7a-87ed-933f34064400 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Neutron deleted interface a8446ab9-60ee-4fc2-8b75-e53b3b39a38f; detaching it from the instance and deleting it from the info cache [ 1286.425720] env[69994]: DEBUG nova.network.neutron [req-fc5ee561-1148-4565-8a98-ba53a6b451d3 
req-d82fc655-3285-4f7a-87ed-933f34064400 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.480882] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242828, 'name': ReconfigVM_Task, 'duration_secs': 1.003645} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.481123] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 73288b0c-7e85-48cd-9ea1-d08a31a81c32/73288b0c-7e85-48cd-9ea1-d08a31a81c32.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1286.481721] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d51a47d9-8724-4add-b892-e2609c55b5b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.488684] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1286.488684] env[69994]: value = "task-3242829" [ 1286.488684] env[69994]: _type = "Task" [ 1286.488684] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.497211] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242829, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.663256] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.663924] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.663924] env[69994]: DEBUG nova.network.neutron [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.909839] env[69994]: DEBUG nova.network.neutron [-] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.931286] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23f47af8-e106-410d-86f7-c8db7dcf1853 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.942844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2d6d61-77b2-49ab-9386-9340e389ea20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.972900] env[69994]: DEBUG nova.compute.manager [req-fc5ee561-1148-4565-8a98-ba53a6b451d3 req-d82fc655-3285-4f7a-87ed-933f34064400 service nova] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Detach interface failed, port_id=a8446ab9-60ee-4fc2-8b75-e53b3b39a38f, reason: Instance eea243fb-97fc-4c65-8699-1b3c321bd250 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1286.999253] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242829, 'name': Rename_Task, 'duration_secs': 0.22179} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.999606] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1286.999775] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99afee38-7021-40e8-b17c-15a412c9f770 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.007356] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1287.007356] env[69994]: value = "task-3242830" [ 1287.007356] env[69994]: _type = "Task" [ 1287.007356] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.016463] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242830, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.299293] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e579d8ca-fede-4aa3-9aa9-b4761f158be1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.308124] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a25b1ac-bdd8-4301-be52-cd8396bae42f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.347916] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f08d28-a2b6-4a3f-ac6a-b561541b24c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.356803] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109209a2-f212-4adb-91b3-010465e4ef60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.371895] env[69994]: DEBUG nova.compute.provider_tree [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.412834] env[69994]: INFO nova.compute.manager [-] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Took 1.43 seconds to deallocate network for instance. 
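The recurring DEBUG entries above of the form "Waiting for the task: (returnval){ ... } to complete" followed by "_poll_task ... progress is N%" come from oslo.vmware's task polling. The sketch below is illustrative only: the helper names, placeholder credentials, retry count and poll interval are assumptions, not values taken from this deployment.

    # Sketch of the oslo.vmware task-wait pattern behind the
    # "Waiting for the task ... to complete" / "_poll_task ... progress is N%"
    # DEBUG entries above. Helper names and session settings are illustrative.
    from oslo_vmware import api as vmware_api


    def make_session(host, user, password):
        # Creating the session logs into vCenter; the retry count and poll
        # interval used here are assumed values, not this deployment's config.
        return vmware_api.VMwareAPISession(
            host, user, password, api_retry_count=10, task_poll_interval=0.5)


    def power_on_and_wait(session, vm_ref):
        # invoke_api() returns a Task managed-object reference; wait_for_task()
        # polls it until completion, emitting progress lines like those in the
        # log, and raises if the task ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)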
[ 1287.416172] env[69994]: DEBUG nova.network.neutron [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [{"id": "068d9b2b-b272-416b-8986-4baa4e3c1270", "address": "fa:16:3e:9c:df:58", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap068d9b2b-b2", "ovs_interfaceid": "068d9b2b-b272-416b-8986-4baa4e3c1270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.518520] env[69994]: DEBUG oslo_vmware.api [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242830, 'name': PowerOnVM_Task, 'duration_secs': 0.487438} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.518728] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1287.518930] env[69994]: INFO nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Took 8.25 seconds to spawn the instance on the hypervisor. 
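Similarly, the repeated 'Acquiring lock "compute_resources" by ...', 'Lock "compute_resources" acquired ... waited' and '"released" ... held' lines are oslo.concurrency's lockutils instrumentation. A small, runnable sketch of that pattern follows; it reuses the "compute_resources" lock name seen in the log, but the decorated function body is hypothetical.

    # Sketch of the oslo.concurrency lock pattern that produces the
    # Acquiring / acquired / "released" DEBUG lines above. Only the lock name
    # mirrors the log; the function body is a placeholder.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs while the in-process "compute_resources" lock is held; lockutils
        # logs how long each caller waited for and then held the lock.
        return 'claimed'


    if __name__ == '__main__':
        print(claim_resources())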
[ 1287.519124] env[69994]: DEBUG nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1287.519929] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a31dc5b-9d54-4edb-8eb1-32ae8f9b26b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.876048] env[69994]: DEBUG nova.scheduler.client.report [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1287.921702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.924459] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.040785] env[69994]: INFO nova.compute.manager [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Took 15.34 seconds to build instance. [ 1288.381844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.382408] env[69994]: DEBUG nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1288.386155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.462s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.386364] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.404411] env[69994]: INFO nova.scheduler.client.report [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted allocations for instance eea243fb-97fc-4c65-8699-1b3c321bd250 [ 1288.543095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6e0cd09a-3150-4935-8895-85a635eba869 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.850s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.888211] env[69994]: DEBUG nova.compute.utils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1288.890296] env[69994]: DEBUG nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1288.890464] env[69994]: DEBUG nova.network.neutron [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1288.910841] env[69994]: DEBUG oslo_concurrency.lockutils [None req-94437cbf-f177-4524-bd2d-090a67a8ce8a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "eea243fb-97fc-4c65-8699-1b3c321bd250" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.580s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.976449] env[69994]: DEBUG nova.policy [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98f7c85a23ae4567befac26d062aeeab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '352ad5b68db1480eb657935e006d7dbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1289.233967] env[69994]: DEBUG nova.network.neutron [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Successfully created port: 06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1289.259913] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "16f60d87-180a-4e23-9d4b-960220489d33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.260171] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "16f60d87-180a-4e23-9d4b-960220489d33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.391092] env[69994]: DEBUG nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1289.438520] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c396e5a0-9078-4b47-ad4c-ef585477fe29 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.458591] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance '217bd31d-f705-4aa7-a8a7-d79e407b7c7b' progress to 0 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1289.762053] env[69994]: DEBUG nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1289.965334] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1289.965960] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd661247-464e-49bc-b722-2b334ecead4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.975135] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1289.975135] env[69994]: value = "task-3242831" [ 1289.975135] env[69994]: _type = "Task" [ 1289.975135] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.984154] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242831, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.282347] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.282722] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.284336] env[69994]: INFO nova.compute.claims [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1290.401182] env[69994]: DEBUG nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1290.408217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.408445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.430252] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False 
{{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1290.430406] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1290.430505] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1290.430687] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1290.430833] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1290.430982] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1290.431201] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1290.431363] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1290.431777] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1290.432014] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1290.432202] env[69994]: DEBUG nova.virt.hardware [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1290.433319] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cac65f-1681-44d5-acff-8b19b89b3c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.442562] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1664910d-1e21-4dd8-90e4-f5a6502dce9f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.485311] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242831, 'name': PowerOffVM_Task, 'duration_secs': 0.2664} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.485568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1290.485751] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance '217bd31d-f705-4aa7-a8a7-d79e407b7c7b' progress to 17 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1290.603196] env[69994]: DEBUG nova.compute.manager [req-abaf129b-0af8-462b-aeb5-22824b1b7767 req-0a55aa73-cb93-4f75-941c-aeb5df446a29 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-vif-plugged-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1290.603393] env[69994]: DEBUG oslo_concurrency.lockutils [req-abaf129b-0af8-462b-aeb5-22824b1b7767 req-0a55aa73-cb93-4f75-941c-aeb5df446a29 service nova] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.603629] env[69994]: DEBUG oslo_concurrency.lockutils [req-abaf129b-0af8-462b-aeb5-22824b1b7767 req-0a55aa73-cb93-4f75-941c-aeb5df446a29 service nova] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.603831] env[69994]: DEBUG oslo_concurrency.lockutils [req-abaf129b-0af8-462b-aeb5-22824b1b7767 req-0a55aa73-cb93-4f75-941c-aeb5df446a29 service nova] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.603976] env[69994]: DEBUG nova.compute.manager [req-abaf129b-0af8-462b-aeb5-22824b1b7767 req-0a55aa73-cb93-4f75-941c-aeb5df446a29 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] No waiting events found dispatching 
network-vif-plugged-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1290.604151] env[69994]: WARNING nova.compute.manager [req-abaf129b-0af8-462b-aeb5-22824b1b7767 req-0a55aa73-cb93-4f75-941c-aeb5df446a29 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received unexpected event network-vif-plugged-06234607-a0e8-40a9-8a07-6f4502407064 for instance with vm_state building and task_state spawning. [ 1290.684459] env[69994]: DEBUG nova.network.neutron [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Successfully updated port: 06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1290.912866] env[69994]: DEBUG nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1290.992232] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:09Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1290.992489] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1290.992648] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1290.992836] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1290.992985] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1290.993153] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1290.993356] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1290.993550] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1290.993751] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1290.993921] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1290.994112] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1290.999441] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dc510ec-ce64-47e1-93d9-a10d71e6fbb5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.017207] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1291.017207] env[69994]: value = "task-3242832" [ 1291.017207] env[69994]: _type = "Task" [ 1291.017207] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.025843] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242832, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.187450] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.187622] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1291.187772] env[69994]: DEBUG nova.network.neutron [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1291.430071] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.434142] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85f35c5-742d-47b1-8056-f7bc318922ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.442125] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128bfb07-bba6-4b59-ab69-c076eb455703 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.473511] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f63175-fdfc-4c5c-874d-59aebdd8003d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.481270] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d65e419-d5b3-414c-9e93-3d0b688b150b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.494721] env[69994]: DEBUG nova.compute.provider_tree [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.526701] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242832, 'name': ReconfigVM_Task, 'duration_secs': 0.368363} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.526991] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance '217bd31d-f705-4aa7-a8a7-d79e407b7c7b' progress to 33 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1291.719323] env[69994]: DEBUG nova.network.neutron [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1291.997735] env[69994]: DEBUG nova.scheduler.client.report [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1292.032472] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1292.032790] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1292.032863] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1292.033052] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1292.033202] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e 
tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1292.033411] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1292.033597] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1292.033784] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1292.033950] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1292.034424] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1292.034424] env[69994]: DEBUG nova.virt.hardware [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1292.039630] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1292.039905] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85415ad3-4b9b-456c-9930-4083e029a228 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.063703] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1292.063703] env[69994]: value = "task-3242833" [ 1292.063703] env[69994]: _type = "Task" [ 1292.063703] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.071853] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242833, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.147810] env[69994]: DEBUG nova.network.neutron [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06234607-a0", "ovs_interfaceid": "06234607-a0e8-40a9-8a07-6f4502407064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.502880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.220s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.503397] env[69994]: DEBUG nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1292.506102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.076s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.507480] env[69994]: INFO nova.compute.claims [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1292.573914] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242833, 'name': ReconfigVM_Task, 'duration_secs': 0.199005} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.574192] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1292.574951] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f31d67-40e2-4747-a0ef-70005b2caee7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.596679] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] volume-c725763f-b1d7-421a-95e2-cd5644ee630e/volume-c725763f-b1d7-421a-95e2-cd5644ee630e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1292.596912] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4090554-59ed-444d-997f-fba790eac035 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.616868] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1292.616868] env[69994]: value = "task-3242834" [ 1292.616868] env[69994]: _type = "Task" [ 1292.616868] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.627817] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242834, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.631246] env[69994]: DEBUG nova.compute.manager [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 req-cc6724db-3514-45e9-8322-768020252115 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-changed-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1292.631453] env[69994]: DEBUG nova.compute.manager [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 req-cc6724db-3514-45e9-8322-768020252115 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing instance network info cache due to event network-changed-06234607-a0e8-40a9-8a07-6f4502407064. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1292.631684] env[69994]: DEBUG oslo_concurrency.lockutils [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 req-cc6724db-3514-45e9-8322-768020252115 service nova] Acquiring lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1292.650719] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1292.651079] env[69994]: DEBUG nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Instance network_info: |[{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06234607-a0", "ovs_interfaceid": "06234607-a0e8-40a9-8a07-6f4502407064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1292.651421] env[69994]: DEBUG oslo_concurrency.lockutils [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 req-cc6724db-3514-45e9-8322-768020252115 service nova] Acquired lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1292.651648] env[69994]: DEBUG nova.network.neutron [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 
req-cc6724db-3514-45e9-8322-768020252115 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1292.652981] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:01:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06234607-a0e8-40a9-8a07-6f4502407064', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1292.660545] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1292.663412] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1292.664125] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10eef17b-f225-434e-8314-218777499218 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.686038] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1292.686038] env[69994]: value = "task-3242835" [ 1292.686038] env[69994]: _type = "Task" [ 1292.686038] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.694432] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242835, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.873615] env[69994]: DEBUG nova.network.neutron [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 req-cc6724db-3514-45e9-8322-768020252115 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updated VIF entry in instance network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1292.874056] env[69994]: DEBUG nova.network.neutron [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 req-cc6724db-3514-45e9-8322-768020252115 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06234607-a0", "ovs_interfaceid": "06234607-a0e8-40a9-8a07-6f4502407064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.011505] env[69994]: DEBUG nova.compute.utils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1293.015088] env[69994]: DEBUG nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1293.015088] env[69994]: DEBUG nova.network.neutron [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1293.049507] env[69994]: DEBUG nova.policy [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '432370da6f1840db8f93b613ca52e31d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42ee300d6f33459da1deb82b1b14cf74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1293.128068] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242834, 'name': ReconfigVM_Task, 'duration_secs': 0.312581} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.128364] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfigured VM instance instance-00000070 to attach disk [datastore1] volume-c725763f-b1d7-421a-95e2-cd5644ee630e/volume-c725763f-b1d7-421a-95e2-cd5644ee630e.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1293.128631] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance '217bd31d-f705-4aa7-a8a7-d79e407b7c7b' progress to 50 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1293.195984] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242835, 'name': CreateVM_Task, 'duration_secs': 0.326453} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.196171] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1293.196829] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.196995] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.197322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1293.197575] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ceccce9-666c-4bfc-809f-9cf779290099 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.203326] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1293.203326] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e72f29-1412-e4a6-8179-935b80244efa" [ 1293.203326] env[69994]: _type = "Task" [ 1293.203326] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.211439] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e72f29-1412-e4a6-8179-935b80244efa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.332783] env[69994]: DEBUG nova.network.neutron [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Successfully created port: 7109845a-8f7a-4f72-8da2-2640a4c07de4 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1293.376809] env[69994]: DEBUG oslo_concurrency.lockutils [req-17fd79fb-eacc-49ca-a107-92b735d8ebf6 req-cc6724db-3514-45e9-8322-768020252115 service nova] Releasing lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.516386] env[69994]: DEBUG nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1293.633659] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b1ec00-443a-4965-9d79-221d6f33c6b9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.638082] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226e5fbc-d77b-42a6-a680-0769d4dbd8d2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.659647] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a746f974-be68-400f-9148-c697576bb17c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.663433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26aeb442-ad36-4e66-8fe1-84c1b7cf6ce1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.681825] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance '217bd31d-f705-4aa7-a8a7-d79e407b7c7b' progress to 67 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1293.713325] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59818db-8b1f-47c5-83a9-66adb1e967b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.721623] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e72f29-1412-e4a6-8179-935b80244efa, 'name': SearchDatastore_Task, 'duration_secs': 0.010077} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.723764] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1293.724011] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1293.724255] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.724403] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.724608] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1293.725584] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a53f1fad-7611-438e-b261-c6c1a061f5a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.728118] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4be06a-ed56-49ed-a779-af1469a57428 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.742901] env[69994]: DEBUG nova.compute.provider_tree [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.744910] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1293.745100] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1293.745952] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac5c0aaf-c10e-46cd-8bc7-24253814f63e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.762061] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1293.762061] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52819a95-b4e0-3717-a3fb-fa61858ae1ac" [ 1293.762061] env[69994]: _type = "Task" [ 1293.762061] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.769865] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52819a95-b4e0-3717-a3fb-fa61858ae1ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.247837] env[69994]: DEBUG nova.scheduler.client.report [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1294.279105] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52819a95-b4e0-3717-a3fb-fa61858ae1ac, 'name': SearchDatastore_Task, 'duration_secs': 0.009433} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.280134] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-251b74b2-8d6f-4f7d-b4c9-e1e9a1f62c10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.285833] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1294.285833] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5227b992-e29e-da85-8df8-353289277147" [ 1294.285833] env[69994]: _type = "Task" [ 1294.285833] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.294127] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5227b992-e29e-da85-8df8-353289277147, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.525375] env[69994]: DEBUG nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1294.551448] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1294.551868] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1294.551949] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1294.552265] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1294.552265] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1294.552409] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1294.552600] env[69994]: DEBUG nova.virt.hardware 
[None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1294.552742] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1294.552905] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1294.553065] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1294.553236] env[69994]: DEBUG nova.virt.hardware [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1294.554668] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af4caf8-8727-4b60-af21-7b3e5e1bb804 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.562999] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef82fe14-96f2-49aa-954b-f15a2236fa8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.733893] env[69994]: DEBUG nova.compute.manager [req-d21ad57b-89b7-4401-a173-aa5d2dc54321 req-3faa37b7-902d-4935-af8f-59088a864ea8 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Received event network-vif-plugged-7109845a-8f7a-4f72-8da2-2640a4c07de4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1294.734138] env[69994]: DEBUG oslo_concurrency.lockutils [req-d21ad57b-89b7-4401-a173-aa5d2dc54321 req-3faa37b7-902d-4935-af8f-59088a864ea8 service nova] Acquiring lock "16f60d87-180a-4e23-9d4b-960220489d33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1294.734374] env[69994]: DEBUG oslo_concurrency.lockutils [req-d21ad57b-89b7-4401-a173-aa5d2dc54321 req-3faa37b7-902d-4935-af8f-59088a864ea8 service nova] Lock "16f60d87-180a-4e23-9d4b-960220489d33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.734495] env[69994]: DEBUG oslo_concurrency.lockutils [req-d21ad57b-89b7-4401-a173-aa5d2dc54321 req-3faa37b7-902d-4935-af8f-59088a864ea8 service nova] Lock "16f60d87-180a-4e23-9d4b-960220489d33-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.734688] env[69994]: DEBUG nova.compute.manager [req-d21ad57b-89b7-4401-a173-aa5d2dc54321 req-3faa37b7-902d-4935-af8f-59088a864ea8 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] No waiting events found dispatching network-vif-plugged-7109845a-8f7a-4f72-8da2-2640a4c07de4 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1294.734858] env[69994]: WARNING nova.compute.manager [req-d21ad57b-89b7-4401-a173-aa5d2dc54321 req-3faa37b7-902d-4935-af8f-59088a864ea8 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Received unexpected event network-vif-plugged-7109845a-8f7a-4f72-8da2-2640a4c07de4 for instance with vm_state building and task_state spawning. [ 1294.754991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.755723] env[69994]: DEBUG nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1294.796781] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5227b992-e29e-da85-8df8-353289277147, 'name': SearchDatastore_Task, 'duration_secs': 0.009948} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.797695] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1294.797962] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1294.798239] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ff605e3-a312-473e-ab0a-5ed03ef6119a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.806100] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1294.806100] env[69994]: value = "task-3242836" [ 1294.806100] env[69994]: _type = "Task" [ 1294.806100] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.816592] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242836, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.823428] env[69994]: DEBUG nova.network.neutron [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Successfully updated port: 7109845a-8f7a-4f72-8da2-2640a4c07de4 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1295.260991] env[69994]: DEBUG nova.compute.utils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1295.262570] env[69994]: DEBUG nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1295.262851] env[69994]: DEBUG nova.network.neutron [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1295.315916] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476967} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.316496] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1295.316496] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1295.317793] env[69994]: DEBUG nova.policy [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08a2b92b6c0141a6a7e301e064032289', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38d5a89ed7c248c3be506ef12caf5f1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1295.319342] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b4686f5-557f-42b0-b1cb-69ff2a396039 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.325967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "refresh_cache-16f60d87-180a-4e23-9d4b-960220489d33" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.326229] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "refresh_cache-16f60d87-180a-4e23-9d4b-960220489d33" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1295.326316] env[69994]: DEBUG 
nova.network.neutron [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1295.329460] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1295.329460] env[69994]: value = "task-3242837" [ 1295.329460] env[69994]: _type = "Task" [ 1295.329460] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.338596] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242837, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.356974] env[69994]: DEBUG nova.network.neutron [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Port 068d9b2b-b272-416b-8986-4baa4e3c1270 binding to destination host cpu-1 is already ACTIVE {{(pid=69994) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1295.654175] env[69994]: DEBUG nova.network.neutron [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Successfully created port: 521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1295.766454] env[69994]: DEBUG nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1295.840883] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070958} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.841256] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1295.842068] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc33e39-707b-4f92-ba9e-6289e4fb5dea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.864632] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1295.865606] env[69994]: DEBUG nova.network.neutron [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Instance cache missing network info. {{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1295.871584] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc4e0dde-bb15-443a-8544-17cef81cac72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.896184] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1295.896184] env[69994]: value = "task-3242838" [ 1295.896184] env[69994]: _type = "Task" [ 1295.896184] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.904878] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242838, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.020611] env[69994]: DEBUG nova.network.neutron [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Updating instance_info_cache with network_info: [{"id": "7109845a-8f7a-4f72-8da2-2640a4c07de4", "address": "fa:16:3e:71:ab:e6", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7109845a-8f", "ovs_interfaceid": "7109845a-8f7a-4f72-8da2-2640a4c07de4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.385733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.385733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.385733] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.408175] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242838, 'name': ReconfigVM_Task, 'duration_secs': 0.331109} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.408450] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1296.409090] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-826958af-1941-4e3f-abd6-b43f43ccc229 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.415875] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1296.415875] env[69994]: value = "task-3242839" [ 1296.415875] env[69994]: _type = "Task" [ 1296.415875] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.425798] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242839, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.523863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "refresh_cache-16f60d87-180a-4e23-9d4b-960220489d33" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1296.524275] env[69994]: DEBUG nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Instance network_info: |[{"id": "7109845a-8f7a-4f72-8da2-2640a4c07de4", "address": "fa:16:3e:71:ab:e6", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7109845a-8f", "ovs_interfaceid": "7109845a-8f7a-4f72-8da2-2640a4c07de4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1296.524833] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:ab:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089ef678-58b4-4bf0-a39d-b94b2d364291', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7109845a-8f7a-4f72-8da2-2640a4c07de4', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1296.533075] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1296.533241] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1296.533444] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0078d43f-83d6-491a-8a24-ceb2c07e876a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.553955] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1296.553955] env[69994]: value = "task-3242840" [ 1296.553955] env[69994]: _type = "Task" [ 1296.553955] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.567939] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242840, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.763750] env[69994]: DEBUG nova.compute.manager [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Received event network-changed-7109845a-8f7a-4f72-8da2-2640a4c07de4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.764096] env[69994]: DEBUG nova.compute.manager [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Refreshing instance network info cache due to event network-changed-7109845a-8f7a-4f72-8da2-2640a4c07de4. 
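The "Instance VIF info" entry above is derived from the neutron network_info cached a few entries earlier: the NSX logical-switch id from the port's binding details becomes an OpaqueNetwork reference, the port id becomes the iface_id, and the bridge name and MAC are carried over. The sketch below is a simplified reconstruction of that mapping based only on the values visible in this log; the helper name is illustrative and not Nova's actual nova.virt.vmwareapi code.

    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        """Map one neutron VIF entry (as logged in instance_info_cache)
        to the VIF-info dict the vmwareapi driver logs before CreateVM_Task.
        Simplified illustrative sketch, not the driver's implementation.
        """
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],        # e.g. 'br-int'
            'mac_address': vif['address'],                    # e.g. 'fa:16:3e:71:ab:e6'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                            # neutron port UUID
            'vif_model': vif_model,
        }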
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1296.764296] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] Acquiring lock "refresh_cache-16f60d87-180a-4e23-9d4b-960220489d33" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.764296] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] Acquired lock "refresh_cache-16f60d87-180a-4e23-9d4b-960220489d33" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.764468] env[69994]: DEBUG nova.network.neutron [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Refreshing network info cache for port 7109845a-8f7a-4f72-8da2-2640a4c07de4 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1296.776841] env[69994]: DEBUG nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1296.806729] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1296.806993] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1296.807171] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1296.807386] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1296.807615] env[69994]: DEBUG nova.virt.hardware [None 
req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1296.807806] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1296.808053] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1296.808242] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1296.808426] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1296.808601] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1296.808791] env[69994]: DEBUG nova.virt.hardware [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1296.809685] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa750d7-6be9-4408-8be6-5bc389ec8a79 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.819298] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac762012-f482-43f3-8695-6ca21871d45f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.927179] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242839, 'name': Rename_Task, 'duration_secs': 0.153194} completed successfully. 
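The nova.virt.hardware entries above walk through CPU topology selection for the m1.nano flavor: with no flavor or image constraints the limits default to 65536 sockets/cores/threads, and for a single vCPU only the 1:1:1 factorization exists, so exactly one possible topology is reported. The following is a rough sketch of that enumeration, simplified from the logic the log references; it is not the exact Nova implementation.

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        """Enumerate (sockets, cores, threads) factorizations of the vCPU
        count that fit within the limits, roughly what the
        'Build topologies for N vcpu(s)' / 'Got N possible topologies'
        lines report. Simplified illustrative sketch only.
        """
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For the m1.nano flavor above (vcpus=1) this yields a single topology,
    # matching the log: [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(possible_cpu_topologies(1))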
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.927455] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1296.927694] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37a2b5a9-052f-42f3-94d0-300481fdda0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.935062] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1296.935062] env[69994]: value = "task-3242841" [ 1296.935062] env[69994]: _type = "Task" [ 1296.935062] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.942477] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242841, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.065203] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242840, 'name': CreateVM_Task, 'duration_secs': 0.460094} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.065376] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1297.066111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.066291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.066637] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1297.066906] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4fcf9ab-56de-4e94-b7ea-653cb653e930 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.072701] env[69994]: DEBUG 
oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1297.072701] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b43ab4-2c06-f896-909d-a8998ed64c0e" [ 1297.072701] env[69994]: _type = "Task" [ 1297.072701] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.080793] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b43ab4-2c06-f896-909d-a8998ed64c0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.216886] env[69994]: DEBUG nova.network.neutron [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Successfully updated port: 521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1297.420082] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.420282] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.420462] env[69994]: DEBUG nova.network.neutron [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1297.445399] env[69994]: DEBUG oslo_vmware.api [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242841, 'name': PowerOnVM_Task, 'duration_secs': 0.461051} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.445667] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1297.445867] env[69994]: INFO nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Took 7.04 seconds to spawn the instance on the hypervisor. 
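The refresh_cache-<uuid> and <uuid>-events lock lines throughout this section are oslo.concurrency locks keyed per instance, so concurrent build requests and external Neutron events serialize their updates to the same instance's network-info cache. A minimal sketch of the same primitive follows; the refresh_fn callback is an assumption standing in for the actual cache rebuild that Nova performs while the lock is held.

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, refresh_fn):
        """Serialize network-info cache refreshes per instance, mirroring the
        Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>" lines above.
        Illustrative sketch only; Nova wraps this differently, but the
        underlying primitive is the same oslo.concurrency lock.
        """
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # Only one worker per instance rebuilds the cache at a time.
            return refresh_fn(instance_uuid)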
[ 1297.446087] env[69994]: DEBUG nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1297.446843] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208f4366-e7d7-49fc-b495-910b1c946c58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.499240] env[69994]: DEBUG nova.network.neutron [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Updated VIF entry in instance network info cache for port 7109845a-8f7a-4f72-8da2-2640a4c07de4. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1297.499661] env[69994]: DEBUG nova.network.neutron [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Updating instance_info_cache with network_info: [{"id": "7109845a-8f7a-4f72-8da2-2640a4c07de4", "address": "fa:16:3e:71:ab:e6", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7109845a-8f", "ovs_interfaceid": "7109845a-8f7a-4f72-8da2-2640a4c07de4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.583165] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b43ab4-2c06-f896-909d-a8998ed64c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.010289} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.583447] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.583697] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1297.583936] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.584099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.584278] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1297.584531] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-360922f7-e54f-4a3a-9fa2-8db8b8c325c3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.592726] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.592895] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1297.593568] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-578ee8ae-55af-4c05-981c-e2a6294dce5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.599824] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1297.599824] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5212a242-c803-561b-97c4-912c6b9c2c4d" [ 1297.599824] env[69994]: _type = "Task" [ 1297.599824] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.606965] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5212a242-c803-561b-97c4-912c6b9c2c4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.722726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.722726] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.722726] env[69994]: DEBUG nova.network.neutron [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1297.964048] env[69994]: INFO nova.compute.manager [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Took 11.85 seconds to build instance. [ 1298.003161] env[69994]: DEBUG oslo_concurrency.lockutils [req-1c489294-c0fe-467c-88cd-43eb5829a943 req-c81ace18-501f-4ed3-af94-7994f455d982 service nova] Releasing lock "refresh_cache-16f60d87-180a-4e23-9d4b-960220489d33" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.112749] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5212a242-c803-561b-97c4-912c6b9c2c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.009245} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.113457] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a3351dc-dbd6-42d0-b95d-de36c3ca9428 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.119745] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1298.119745] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]520f24b4-b7a1-7696-3fdb-a6415f5e2180" [ 1298.119745] env[69994]: _type = "Task" [ 1298.119745] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.128204] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520f24b4-b7a1-7696-3fdb-a6415f5e2180, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.160172] env[69994]: DEBUG nova.network.neutron [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [{"id": "068d9b2b-b272-416b-8986-4baa4e3c1270", "address": "fa:16:3e:9c:df:58", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap068d9b2b-b2", "ovs_interfaceid": "068d9b2b-b272-416b-8986-4baa4e3c1270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.250032] env[69994]: DEBUG nova.network.neutron [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Instance cache missing network info. 
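The instance_info_cache payloads above are plain JSON-serializable lists, so the fixed and floating addresses they carry can be read out directly. The helper below is illustrative only and simply mirrors the structure shown for port 068d9b2b-b272-416b-8986-4baa4e3c1270.

    def addresses_from_network_info(network_info):
        """Collect fixed and floating IPs from a logged instance_info_cache
        entry (the JSON list shown above). Illustrative helper only.
        """
        fixed, floating = [], []
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return fixed, floating

    # For the port above this returns (['192.168.128.6'], ['10.180.180.240'])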
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1298.368963] env[69994]: DEBUG nova.network.neutron [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updating instance_info_cache with network_info: [{"id": "521061f8-5fe8-473b-ba95-6d17064efb80", "address": "fa:16:3e:ca:b0:f9", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521061f8-5f", "ovs_interfaceid": "521061f8-5fe8-473b-ba95-6d17064efb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.465758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-e30db2ef-ae67-49d8-9147-b5308fdb4f50 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.361s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.633173] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]520f24b4-b7a1-7696-3fdb-a6415f5e2180, 'name': SearchDatastore_Task, 'duration_secs': 0.010872} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.633443] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.633745] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 16f60d87-180a-4e23-9d4b-960220489d33/16f60d87-180a-4e23-9d4b-960220489d33.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1298.634034] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0144a0fc-db20-4cea-b55d-d729b850830c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.641819] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1298.641819] env[69994]: value = "task-3242842" [ 1298.641819] env[69994]: _type = "Task" [ 1298.641819] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.649217] env[69994]: DEBUG nova.compute.manager [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-changed-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.649217] env[69994]: DEBUG nova.compute.manager [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing instance network info cache due to event network-changed-06234607-a0e8-40a9-8a07-6f4502407064. 
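The CopyVirtualDisk_Task above clones the cached base image into the new instance's directory on the same datastore, and the ExtendVirtualDisk_Task that follows grows the copy to the flavor's root_gb. The two helpers below are hypothetical and only show how the bracketed datastore paths seen in the log are composed; the names are illustrative, not Nova's.

    def cached_image_path(datastore, image_id):
        """Datastore path of the cached base image, as logged:
        [datastore2] devstack-image-cache_base/<image>/<image>.vmdk"""
        return '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
            datastore, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        """Destination path for the instance root disk, as logged:
        [datastore2] <uuid>/<uuid>.vmdk"""
        return '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)

    # The CopyVirtualDisk_Task above copies
    # cached_image_path('datastore2', 'cc2e14cc-b12f-480a-a387-dd21e9efda8b')
    # to instance_disk_path('datastore2', '16f60d87-180a-4e23-9d4b-960220489d33'),
    # after which ExtendVirtualDisk_Task grows the copy to the flavor's root_gb.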
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1298.650084] env[69994]: DEBUG oslo_concurrency.lockutils [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] Acquiring lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.650084] env[69994]: DEBUG oslo_concurrency.lockutils [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] Acquired lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.650084] env[69994]: DEBUG nova.network.neutron [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1298.654750] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242842, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.662367] env[69994]: DEBUG oslo_concurrency.lockutils [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.791239] env[69994]: DEBUG nova.compute.manager [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Received event network-vif-plugged-521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.791486] env[69994]: DEBUG oslo_concurrency.lockutils [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] Acquiring lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.791680] env[69994]: DEBUG oslo_concurrency.lockutils [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.791822] env[69994]: DEBUG oslo_concurrency.lockutils [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.791999] env[69994]: DEBUG nova.compute.manager [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 
service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] No waiting events found dispatching network-vif-plugged-521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1298.792235] env[69994]: WARNING nova.compute.manager [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Received unexpected event network-vif-plugged-521061f8-5fe8-473b-ba95-6d17064efb80 for instance with vm_state building and task_state spawning. [ 1298.792400] env[69994]: DEBUG nova.compute.manager [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Received event network-changed-521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.792556] env[69994]: DEBUG nova.compute.manager [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Refreshing instance network info cache due to event network-changed-521061f8-5fe8-473b-ba95-6d17064efb80. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1298.792722] env[69994]: DEBUG oslo_concurrency.lockutils [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] Acquiring lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.871613] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.871955] env[69994]: DEBUG nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Instance network_info: |[{"id": "521061f8-5fe8-473b-ba95-6d17064efb80", "address": "fa:16:3e:ca:b0:f9", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521061f8-5f", "ovs_interfaceid": "521061f8-5fe8-473b-ba95-6d17064efb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1298.872488] env[69994]: DEBUG oslo_concurrency.lockutils [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] Acquired lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.872749] env[69994]: DEBUG nova.network.neutron [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Refreshing network info cache for port 521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1298.874344] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:b0:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ddfb706a-add1-4e16-9ac4-d20b16a1df6d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '521061f8-5fe8-473b-ba95-6d17064efb80', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1298.883113] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1298.884297] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1298.884535] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c87cca1-5534-4a90-9b49-da418f3ca7a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.909463] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1298.909463] env[69994]: value = "task-3242843" [ 1298.909463] env[69994]: _type = "Task" [ 1298.909463] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.921818] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242843, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.154918] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242842, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504953} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.155223] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 16f60d87-180a-4e23-9d4b-960220489d33/16f60d87-180a-4e23-9d4b-960220489d33.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1299.155400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1299.155983] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d24a4f5-6a59-4004-ac96-1c44c5d71375 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.163248] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1299.163248] env[69994]: value = "task-3242844" [ 1299.163248] env[69994]: _type = "Task" [ 1299.163248] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.174940] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242844, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.175751] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ac6f8a-7b47-456d-ac51-87da85bec5ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.185157] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a5b3ff-b60e-4a29-ab34-187ca698348f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.367666] env[69994]: DEBUG nova.network.neutron [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updated VIF entry in instance network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1299.367666] env[69994]: DEBUG nova.network.neutron [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06234607-a0", "ovs_interfaceid": "06234607-a0e8-40a9-8a07-6f4502407064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.421859] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242843, 'name': CreateVM_Task, 'duration_secs': 0.407938} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.422109] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1299.422667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.423051] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.423127] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1299.423383] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-213384ad-545d-4cfa-ade2-2560a397f834 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.428213] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1299.428213] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52536b56-39fe-8c85-c6bb-75f036f0e3ca" [ 1299.428213] env[69994]: _type = "Task" [ 1299.428213] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.436088] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52536b56-39fe-8c85-c6bb-75f036f0e3ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.559031] env[69994]: DEBUG nova.network.neutron [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updated VIF entry in instance network info cache for port 521061f8-5fe8-473b-ba95-6d17064efb80. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1299.559364] env[69994]: DEBUG nova.network.neutron [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updating instance_info_cache with network_info: [{"id": "521061f8-5fe8-473b-ba95-6d17064efb80", "address": "fa:16:3e:ca:b0:f9", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521061f8-5f", "ovs_interfaceid": "521061f8-5fe8-473b-ba95-6d17064efb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.673220] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242844, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072376} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.673499] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1299.674315] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b09d11-2e3d-4bbe-b044-89b3a6003d8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.696739] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 16f60d87-180a-4e23-9d4b-960220489d33/16f60d87-180a-4e23-9d4b-960220489d33.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1299.697028] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f6c60fa-3f57-4504-ace7-24d574d6912b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.717051] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1299.717051] env[69994]: value = "task-3242845" [ 1299.717051] env[69994]: _type = "Task" [ 1299.717051] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.724986] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242845, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.870497] env[69994]: DEBUG oslo_concurrency.lockutils [req-88e335b3-f851-48c9-9d71-179b606c1d59 req-98d1a57b-4ba2-4801-ba4b-f12ecaa9f473 service nova] Releasing lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.940128] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52536b56-39fe-8c85-c6bb-75f036f0e3ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010532} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.940445] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.940827] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1299.941162] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.941323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.941516] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1299.941805] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0281a3b-3b38-4cdf-8b4d-1d3eab61a61f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.951330] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1299.951504] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1299.952264] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b19c2ad9-7b1d-490a-8e32-cfacb976b3a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.958139] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1299.958139] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ac0d01-584a-9ac6-4dd6-48ccdfab6abd" [ 1299.958139] env[69994]: _type = "Task" [ 1299.958139] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.967070] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac0d01-584a-9ac6-4dd6-48ccdfab6abd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.062715] env[69994]: DEBUG oslo_concurrency.lockutils [req-110b4a9c-faf3-4e64-bbbd-cc285410a621 req-d5afe929-abe8-4b70-b132-3af33badbf69 service nova] Releasing lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1300.229146] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242845, 'name': ReconfigVM_Task, 'duration_secs': 0.303846} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.229543] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 16f60d87-180a-4e23-9d4b-960220489d33/16f60d87-180a-4e23-9d4b-960220489d33.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1300.230037] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e580130-cf81-4866-8fd2-4e2c2ac4ea98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.237872] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1300.237872] env[69994]: value = "task-3242846" [ 1300.237872] env[69994]: _type = "Task" [ 1300.237872] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.245920] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242846, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.294170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4f91b1-7ca9-4bcc-9b42-47884de57c1e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.316461] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deb594a-2101-48dc-8f20-8eb4f38170ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.324186] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance '217bd31d-f705-4aa7-a8a7-d79e407b7c7b' progress to 83 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1300.470040] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ac0d01-584a-9ac6-4dd6-48ccdfab6abd, 'name': SearchDatastore_Task, 'duration_secs': 0.018808} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.470828] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87451a5a-d6d3-482d-8dba-e647e4fadf89 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.476435] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1300.476435] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5262804a-f2fe-dec1-8fd3-7dbdf409c9de" [ 1300.476435] env[69994]: _type = "Task" [ 1300.476435] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.484290] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5262804a-f2fe-dec1-8fd3-7dbdf409c9de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.749415] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242846, 'name': Rename_Task, 'duration_secs': 0.143764} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.749672] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.749882] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2270e730-fe43-4480-a790-25e114141908 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.763087] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1300.763087] env[69994]: value = "task-3242847" [ 1300.763087] env[69994]: _type = "Task" [ 1300.763087] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.773110] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242847, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.830317] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1300.830636] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b6dd969-de8c-498f-a696-da97e8facd17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.838732] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1300.838732] env[69994]: value = "task-3242848" [ 1300.838732] env[69994]: _type = "Task" [ 1300.838732] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.847295] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242848, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.987603] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5262804a-f2fe-dec1-8fd3-7dbdf409c9de, 'name': SearchDatastore_Task, 'duration_secs': 0.010157} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.987889] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1300.988168] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 7e92935f-fc1f-4893-8f69-4b97e4729a7f/7e92935f-fc1f-4893-8f69-4b97e4729a7f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1300.988431] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-158be3ca-4018-45aa-b599-97c68f2b7730 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.996689] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1300.996689] env[69994]: value = "task-3242849" [ 1300.996689] env[69994]: _type = "Task" [ 1300.996689] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.004802] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242849, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.274447] env[69994]: DEBUG oslo_vmware.api [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242847, 'name': PowerOnVM_Task, 'duration_secs': 0.487295} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.274809] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.274937] env[69994]: INFO nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Took 6.75 seconds to spawn the instance on the hypervisor. 
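The entries above repeat one pattern throughout this log: the driver invokes a vSphere method that returns a Task (CreateVM_Task, SearchDatastore_Task, ReconfigVM_Task, PowerOnVM_Task, ...), and oslo_vmware.api then polls it ("progress is N%") until it reports "completed successfully". The sketch below is a minimal, self-contained illustration of that poll loop only; TaskInfo, get_task_info and wait_for_task are hypothetical names used for illustration and are not the actual oslo.vmware API.

import time
from dataclasses import dataclass

# Hypothetical stand-in for the vSphere task state the driver reads back via
# the PropertyCollector.RetrievePropertiesEx calls visible in the log.
@dataclass
class TaskInfo:
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0     # percentage reported while the task runs
    error: str | None = None

def wait_for_task(get_task_info, interval=0.5):
    # Poll until a terminal state, mirroring the "Waiting for the task ...",
    # "progress is N%" and "completed successfully" lines above.
    while True:
        info = get_task_info()          # assumed callable returning a TaskInfo
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        time.sleep(interval)            # the real code uses a looping call, not a bare sleep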
[ 1301.275133] env[69994]: DEBUG nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1301.275979] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c81210-73f2-4ad0-9680-84c70257dea1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.350182] env[69994]: DEBUG oslo_vmware.api [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242848, 'name': PowerOnVM_Task, 'duration_secs': 0.410578} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.350448] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1301.350637] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-fd84af49-2f7b-49db-9128-402bd090573e tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance '217bd31d-f705-4aa7-a8a7-d79e407b7c7b' progress to 100 {{(pid=69994) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1301.507291] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242849, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.795347] env[69994]: INFO nova.compute.manager [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Took 11.53 seconds to build instance. [ 1302.007794] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524286} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.008092] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 7e92935f-fc1f-4893-8f69-4b97e4729a7f/7e92935f-fc1f-4893-8f69-4b97e4729a7f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1302.008316] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1302.008575] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae9fec10-ee2c-42c9-af54-ec0b08d0c406 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.016523] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1302.016523] env[69994]: value = "task-3242850" [ 1302.016523] env[69994]: _type = "Task" [ 1302.016523] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.025580] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.297384] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d2089e4-c695-4edd-8568-2c8eefd5c199 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "16f60d87-180a-4e23-9d4b-960220489d33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.037s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.529550] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076291} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.529881] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1302.530690] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eae38e8-4172-4abe-b23c-d595ac2aebf5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.557349] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 7e92935f-fc1f-4893-8f69-4b97e4729a7f/7e92935f-fc1f-4893-8f69-4b97e4729a7f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1302.557516] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e724abf-e00a-487e-83ac-2fc34782d996 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.582350] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1302.582350] env[69994]: value = "task-3242851" [ 1302.582350] env[69994]: _type = "Task" [ 1302.582350] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.590736] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242851, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.680554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "16f60d87-180a-4e23-9d4b-960220489d33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1302.680808] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "16f60d87-180a-4e23-9d4b-960220489d33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1302.681073] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "16f60d87-180a-4e23-9d4b-960220489d33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1302.683496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "16f60d87-180a-4e23-9d4b-960220489d33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.002s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1302.683589] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "16f60d87-180a-4e23-9d4b-960220489d33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.685876] env[69994]: INFO nova.compute.manager [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Terminating instance [ 1303.094368] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242851, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.172294] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.172560] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.172754] env[69994]: DEBUG nova.compute.manager [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Going to confirm migration 10 {{(pid=69994) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1303.190194] env[69994]: DEBUG nova.compute.manager [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1303.190403] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1303.191951] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223e7894-9ce2-419c-8215-3f82e6af4908 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.201473] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1303.201737] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7209934-dd3e-42c7-9035-1cbeee545d6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.208623] env[69994]: DEBUG oslo_vmware.api [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1303.208623] env[69994]: value = "task-3242852" [ 1303.208623] env[69994]: _type = "Task" [ 1303.208623] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.218434] env[69994]: DEBUG oslo_vmware.api [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.597181] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242851, 'name': ReconfigVM_Task, 'duration_secs': 0.596539} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.597557] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 7e92935f-fc1f-4893-8f69-4b97e4729a7f/7e92935f-fc1f-4893-8f69-4b97e4729a7f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1303.598346] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2560c6a0-0999-4fe7-8fc3-27a0a5c03ba9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.607110] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1303.607110] env[69994]: value = "task-3242853" [ 1303.607110] env[69994]: _type = "Task" [ 1303.607110] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.615617] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242853, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.715307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.715541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquired lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.715752] env[69994]: DEBUG nova.network.neutron [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1303.715946] env[69994]: DEBUG nova.objects.instance [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'info_cache' on Instance uuid 217bd31d-f705-4aa7-a8a7-d79e407b7c7b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.720555] env[69994]: DEBUG oslo_vmware.api [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242852, 'name': PowerOffVM_Task, 'duration_secs': 0.221104} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.721047] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1303.721244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1303.721492] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44551152-6027-4108-8f30-a013a6df341f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.808548] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1303.808841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1303.809150] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleting the datastore file [datastore2] 16f60d87-180a-4e23-9d4b-960220489d33 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1303.809494] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-683729cf-0a63-4462-9b7e-c4b7f52bf81e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.818253] env[69994]: DEBUG oslo_vmware.api [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1303.818253] env[69994]: value = "task-3242855" [ 1303.818253] env[69994]: _type = "Task" [ 1303.818253] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.827246] env[69994]: DEBUG oslo_vmware.api [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.117884] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242853, 'name': Rename_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.329345] env[69994]: DEBUG oslo_vmware.api [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210924} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.329547] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1304.329731] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1304.329910] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1304.330094] env[69994]: INFO nova.compute.manager [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1304.330336] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1304.330524] env[69994]: DEBUG nova.compute.manager [-] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1304.330619] env[69994]: DEBUG nova.network.neutron [-] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1304.621133] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242853, 'name': Rename_Task, 'duration_secs': 0.873968} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.621382] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1304.621552] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef0c444d-663f-4fa6-8723-59a0b689f848 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.629712] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1304.629712] env[69994]: value = "task-3242856" [ 1304.629712] env[69994]: _type = "Task" [ 1304.629712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.636238] env[69994]: DEBUG nova.compute.manager [req-af67ce47-9ec5-410b-891d-009470b0fe06 req-cf1c37ab-7bc2-471f-88d1-b602ff8f56e5 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Received event network-vif-deleted-7109845a-8f7a-4f72-8da2-2640a4c07de4 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1304.636431] env[69994]: INFO nova.compute.manager [req-af67ce47-9ec5-410b-891d-009470b0fe06 req-cf1c37ab-7bc2-471f-88d1-b602ff8f56e5 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Neutron deleted interface 7109845a-8f7a-4f72-8da2-2640a4c07de4; detaching it from the instance and deleting it from the info cache [ 1304.636613] env[69994]: DEBUG nova.network.neutron [req-af67ce47-9ec5-410b-891d-009470b0fe06 req-cf1c37ab-7bc2-471f-88d1-b602ff8f56e5 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.643564] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242856, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.947463] env[69994]: DEBUG nova.network.neutron [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [{"id": "068d9b2b-b272-416b-8986-4baa4e3c1270", "address": "fa:16:3e:9c:df:58", "network": {"id": "84a79aab-0d1e-4aaf-9422-316cb7d239fb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1732515887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccb64f97e46a4e499df974959db53dcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap068d9b2b-b2", "ovs_interfaceid": "068d9b2b-b272-416b-8986-4baa4e3c1270", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.116645] env[69994]: DEBUG nova.network.neutron [-] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.141264] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242856, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.142029] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e3d9084-87c1-431b-b4a9-fbd95040b054 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.153647] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73bbacf-822f-4e08-8eed-234281133e31 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.184436] env[69994]: DEBUG nova.compute.manager [req-af67ce47-9ec5-410b-891d-009470b0fe06 req-cf1c37ab-7bc2-471f-88d1-b602ff8f56e5 service nova] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Detach interface failed, port_id=7109845a-8f7a-4f72-8da2-2640a4c07de4, reason: Instance 16f60d87-180a-4e23-9d4b-960220489d33 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1305.206787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29aa0801-42cb-47d1-b9e5-03e3aeb9d0a9 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1305.207068] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29aa0801-42cb-47d1-b9e5-03e3aeb9d0a9 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.207398] env[69994]: DEBUG nova.objects.instance [None req-29aa0801-42cb-47d1-b9e5-03e3aeb9d0a9 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'flavor' on Instance uuid 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1305.449767] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Releasing lock "refresh_cache-217bd31d-f705-4aa7-a8a7-d79e407b7c7b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.450057] env[69994]: DEBUG nova.objects.instance [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'migration_context' on Instance uuid 217bd31d-f705-4aa7-a8a7-d79e407b7c7b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1305.619522] env[69994]: INFO nova.compute.manager [-] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Took 1.29 seconds to deallocate network for instance. [ 1305.641455] env[69994]: DEBUG oslo_vmware.api [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242856, 'name': PowerOnVM_Task, 'duration_secs': 0.685315} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.641799] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1305.641887] env[69994]: INFO nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Took 8.86 seconds to spawn the instance on the hypervisor. 
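Taken together, the task names in this stretch of the log trace the image-cache spawn path for instance 7e92935f-fc1f-4893-8f69-4b97e4729a7f: create the VM shell, locate the cached image on datastore1, create the cache directory if needed, copy the cached vmdk into the instance directory, extend the root disk, reconfigure the VM to attach the disk, rename it, and power it on. The snippet below only restates that observed ordering; the step labels are editorial and the list is not a Nova data structure.

# Ordered vSphere operations observed above for instance 7e92935f-...; the
# operation names come from the log, the step labels are editorial.
SPAWN_STEPS = [
    ("create VM shell",              "CreateVM_Task"),
    ("locate cached image",          "SearchDatastore_Task"),
    ("ensure cache directory",       "FileManager.MakeDirectory"),
    ("copy vmdk to instance dir",    "CopyVirtualDisk_Task"),
    ("extend root disk",             "ExtendVirtualDisk_Task"),
    ("attach disk via reconfigure",  "ReconfigVM_Task"),
    ("rename VM",                    "Rename_Task"),
    ("power on",                     "PowerOnVM_Task"),
]

if __name__ == "__main__":
    for label, op in SPAWN_STEPS:
        print(f"{op:<28} -> {label}")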
[ 1305.642079] env[69994]: DEBUG nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1305.642822] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21d5cab-57ed-4b63-b973-a2c3338eabb7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.711846] env[69994]: DEBUG nova.objects.instance [None req-29aa0801-42cb-47d1-b9e5-03e3aeb9d0a9 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'pci_requests' on Instance uuid 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1305.952869] env[69994]: DEBUG nova.objects.base [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Object Instance<217bd31d-f705-4aa7-a8a7-d79e407b7c7b> lazy-loaded attributes: info_cache,migration_context {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1305.954170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b932ba98-acf1-4a9e-a675-ef2319f5fec2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.973109] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2641ddad-c077-4363-8d8b-d8e5e4fd5daf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.979068] env[69994]: DEBUG oslo_vmware.api [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1305.979068] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5236d89c-c95c-93e0-318b-4aa85da24e8d" [ 1305.979068] env[69994]: _type = "Task" [ 1305.979068] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.986732] env[69994]: DEBUG oslo_vmware.api [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5236d89c-c95c-93e0-318b-4aa85da24e8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.126185] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.126501] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.126723] env[69994]: DEBUG nova.objects.instance [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'resources' on Instance uuid 16f60d87-180a-4e23-9d4b-960220489d33 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1306.158455] env[69994]: INFO nova.compute.manager [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Took 14.74 seconds to build instance. [ 1306.213646] env[69994]: DEBUG nova.objects.base [None req-29aa0801-42cb-47d1-b9e5-03e3aeb9d0a9 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Object Instance<1735049d-a240-48fc-a360-3b00b02225b1> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1306.213882] env[69994]: DEBUG nova.network.neutron [None req-29aa0801-42cb-47d1-b9e5-03e3aeb9d0a9 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1306.335320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-29aa0801-42cb-47d1-b9e5-03e3aeb9d0a9 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.128s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.489522] env[69994]: DEBUG oslo_vmware.api [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5236d89c-c95c-93e0-318b-4aa85da24e8d, 'name': SearchDatastore_Task, 'duration_secs': 0.008414} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.489759] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.662569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-693e2479-93bb-4e93-aad3-f16c0df5ad02 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.254s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.752069] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cccf8464-c413-4b01-9a1f-7a078e5396ac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.759659] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718e98d5-1c08-4225-b463-da12a59ec6ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.792476] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfabc7b2-2c0e-4396-b6d1-0bbe819b63e2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.799799] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e612a70-d987-4c27-bb94-1748c4b1f8d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.812792] env[69994]: DEBUG nova.compute.provider_tree [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1307.001927] env[69994]: DEBUG nova.compute.manager [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Received event network-changed-521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1307.002137] env[69994]: DEBUG nova.compute.manager [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Refreshing instance network info cache due to event network-changed-521061f8-5fe8-473b-ba95-6d17064efb80. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1307.002354] env[69994]: DEBUG oslo_concurrency.lockutils [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] Acquiring lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.002498] env[69994]: DEBUG oslo_concurrency.lockutils [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] Acquired lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.002659] env[69994]: DEBUG nova.network.neutron [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Refreshing network info cache for port 521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1307.316492] env[69994]: DEBUG nova.scheduler.client.report [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1307.725224] env[69994]: DEBUG nova.network.neutron [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updated VIF entry in instance network info cache for port 521061f8-5fe8-473b-ba95-6d17064efb80. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.725613] env[69994]: DEBUG nova.network.neutron [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updating instance_info_cache with network_info: [{"id": "521061f8-5fe8-473b-ba95-6d17064efb80", "address": "fa:16:3e:ca:b0:f9", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521061f8-5f", "ovs_interfaceid": "521061f8-5fe8-473b-ba95-6d17064efb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.821440] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.824143] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.334s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.841219] env[69994]: INFO nova.scheduler.client.report [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted allocations for instance 16f60d87-180a-4e23-9d4b-960220489d33 [ 1308.228792] env[69994]: DEBUG oslo_concurrency.lockutils [req-2643b77f-da17-4284-a02e-0019b71a4dd0 req-e415b38d-d5e3-4c8e-a380-a2ed74fe9ba7 service nova] Releasing lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.244587] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1308.244871] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1308.245222] env[69994]: DEBUG nova.objects.instance [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'flavor' on Instance uuid 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1308.351143] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f14e82d1-f02e-4a2e-a07f-a6d5cdec3574 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "16f60d87-180a-4e23-9d4b-960220489d33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.670s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1308.438733] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2564a592-e3a8-47ee-9fe1-cf032ed40d66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.447177] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e26935-e4ac-4b0e-9e8f-d60ca972cee4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.477384] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f185c6c-dc6d-4db2-9d3b-3c142c98e98a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.484513] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8892194d-164c-44be-acdf-71122dfa59eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.497680] env[69994]: DEBUG nova.compute.provider_tree [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.822721] env[69994]: DEBUG nova.objects.instance [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'pci_requests' on Instance uuid 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1309.000929] env[69994]: DEBUG nova.scheduler.client.report [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1309.325231] env[69994]: DEBUG nova.objects.base [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Object Instance<1735049d-a240-48fc-a360-3b00b02225b1> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1309.325464] env[69994]: DEBUG nova.network.neutron [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1309.385315] env[69994]: DEBUG nova.policy [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1309.639618] env[69994]: DEBUG nova.network.neutron [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Successfully created port: 1ea752f0-3e05-4c05-8029-10dc8418968e {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1309.700765] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "e8c3effc-9430-433f-bf88-b3904cfaa31f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.701009] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.010878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.187s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.205494] env[69994]: DEBUG nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1310.566069] env[69994]: INFO nova.scheduler.client.report [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted allocation for migration 23739038-3397-4e3b-8057-e4f144cc36fe [ 1310.631233] env[69994]: INFO nova.compute.manager [None req-68e46ea3-3522-4801-89d8-e85f8e071e4a tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Get console output [ 1310.631572] env[69994]: WARNING nova.virt.vmwareapi.driver [None req-68e46ea3-3522-4801-89d8-e85f8e071e4a tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] The console log is missing. Check your VSPC configuration [ 1310.723951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.724276] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.725863] env[69994]: INFO nova.compute.claims [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1311.073128] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7d472fac-d5dc-4c11-87dd-b04d45f28968 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.899s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.075796] env[69994]: DEBUG nova.network.neutron [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Successfully updated port: 1ea752f0-3e05-4c05-8029-10dc8418968e {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1311.152311] env[69994]: DEBUG nova.compute.manager [req-bee5b0c0-54f6-4471-8d60-840b556a193a req-b7be85ed-a6d6-4f40-b0de-1a0f98ae27a1 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-vif-plugged-1ea752f0-3e05-4c05-8029-10dc8418968e {{(pid=69994) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1311.152311] env[69994]: DEBUG oslo_concurrency.lockutils [req-bee5b0c0-54f6-4471-8d60-840b556a193a req-b7be85ed-a6d6-4f40-b0de-1a0f98ae27a1 service nova] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.152614] env[69994]: DEBUG oslo_concurrency.lockutils [req-bee5b0c0-54f6-4471-8d60-840b556a193a req-b7be85ed-a6d6-4f40-b0de-1a0f98ae27a1 service nova] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.152654] env[69994]: DEBUG oslo_concurrency.lockutils [req-bee5b0c0-54f6-4471-8d60-840b556a193a req-b7be85ed-a6d6-4f40-b0de-1a0f98ae27a1 service nova] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.152856] env[69994]: DEBUG nova.compute.manager [req-bee5b0c0-54f6-4471-8d60-840b556a193a req-b7be85ed-a6d6-4f40-b0de-1a0f98ae27a1 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] No waiting events found dispatching network-vif-plugged-1ea752f0-3e05-4c05-8029-10dc8418968e {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1311.153015] env[69994]: WARNING nova.compute.manager [req-bee5b0c0-54f6-4471-8d60-840b556a193a req-b7be85ed-a6d6-4f40-b0de-1a0f98ae27a1 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received unexpected event network-vif-plugged-1ea752f0-3e05-4c05-8029-10dc8418968e for instance with vm_state active and task_state None. 
[ 1311.579183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.579305] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.579466] env[69994]: DEBUG nova.network.neutron [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1311.837889] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7efeb9b-aa3d-4254-8e16-fc5653b5e3ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.845094] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cd51fd-473d-42f8-8a19-feb9fd22626e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.876039] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966be985-3a42-4f9f-94cf-3f7c957da452 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.883624] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62290c6f-30c8-419f-9783-e348ca459f8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.897234] env[69994]: DEBUG nova.compute.provider_tree [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.115319] env[69994]: WARNING nova.network.neutron [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. 
ignoring it [ 1312.380790] env[69994]: DEBUG nova.network.neutron [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ea752f0-3e05-4c05-8029-10dc8418968e", "address": "fa:16:3e:ac:9c:28", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ea752f0-3e", "ovs_interfaceid": "1ea752f0-3e05-4c05-8029-10dc8418968e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.400183] env[69994]: DEBUG nova.scheduler.client.report [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1312.883832] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.884484] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.884649] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.885525] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f87ab90-3d73-472c-a409-b59773e26538 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.902659] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1312.902894] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1312.903086] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1312.903279] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1312.903425] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1312.903570] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1312.903790] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1312.903965] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1312.904160] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1312.904329] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1312.904503] env[69994]: DEBUG nova.virt.hardware [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1312.910655] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1312.911363] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.911797] env[69994]: DEBUG nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1312.914887] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5a6a178-6f66-4c10-99e3-f94751de71ec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.931614] env[69994]: DEBUG oslo_vmware.api [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1312.931614] env[69994]: value = "task-3242857" [ 1312.931614] env[69994]: _type = "Task" [ 1312.931614] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.939190] env[69994]: DEBUG oslo_vmware.api [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242857, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.178566] env[69994]: DEBUG nova.compute.manager [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-changed-1ea752f0-3e05-4c05-8029-10dc8418968e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1313.178826] env[69994]: DEBUG nova.compute.manager [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing instance network info cache due to event network-changed-1ea752f0-3e05-4c05-8029-10dc8418968e. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1313.179366] env[69994]: DEBUG oslo_concurrency.lockutils [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.179523] env[69994]: DEBUG oslo_concurrency.lockutils [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1313.180040] env[69994]: DEBUG nova.network.neutron [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing network info cache for port 1ea752f0-3e05-4c05-8029-10dc8418968e {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.416662] env[69994]: DEBUG nova.compute.utils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1313.418579] env[69994]: DEBUG nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1313.418793] env[69994]: DEBUG nova.network.neutron [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1313.442256] env[69994]: DEBUG oslo_vmware.api [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242857, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.460033] env[69994]: DEBUG nova.policy [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '432370da6f1840db8f93b613ca52e31d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42ee300d6f33459da1deb82b1b14cf74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1313.686924] env[69994]: DEBUG nova.network.neutron [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Successfully created port: d3469937-e698-4fca-bf8e-450cdeb47b20 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1313.885292] env[69994]: DEBUG nova.network.neutron [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updated VIF entry in instance network info cache for port 1ea752f0-3e05-4c05-8029-10dc8418968e. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.885741] env[69994]: DEBUG nova.network.neutron [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ea752f0-3e05-4c05-8029-10dc8418968e", "address": "fa:16:3e:ac:9c:28", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ea752f0-3e", "ovs_interfaceid": "1ea752f0-3e05-4c05-8029-10dc8418968e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.921874] env[69994]: DEBUG nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1313.944022] env[69994]: DEBUG oslo_vmware.api [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242857, 'name': ReconfigVM_Task, 'duration_secs': 0.71983} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.944567] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.944781] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1314.388843] env[69994]: DEBUG oslo_concurrency.lockutils [req-01af4d1a-372c-4c52-9aee-7d8cf576016a req-efb2917e-d18f-46b2-b1d3-35a6ca36bce2 service nova] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1314.449583] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9405cb61-8b31-41de-b03e-a2cfc136dfde tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.205s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.932391] env[69994]: DEBUG nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1314.959428] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1314.959666] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1314.959825] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1314.960111] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1314.960344] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1314.960517] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1314.960880] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1314.961134] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1314.961324] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 
tempest-ServersTestJSON-478926025-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1314.961686] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1314.961884] env[69994]: DEBUG nova.virt.hardware [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1314.962732] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0f2547-d2c8-4331-9f99-642dad4ac113 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.970578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c2359e-270b-4bcb-81bf-8bf2606b11ad {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.363389] env[69994]: DEBUG nova.compute.manager [req-7b9ef370-ec9a-46ce-ade9-044898bac8a7 req-0d2c7151-920e-4784-81c7-0dc2a0d25176 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Received event network-vif-plugged-d3469937-e698-4fca-bf8e-450cdeb47b20 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1315.363620] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b9ef370-ec9a-46ce-ade9-044898bac8a7 req-0d2c7151-920e-4784-81c7-0dc2a0d25176 service nova] Acquiring lock "e8c3effc-9430-433f-bf88-b3904cfaa31f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.363848] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b9ef370-ec9a-46ce-ade9-044898bac8a7 req-0d2c7151-920e-4784-81c7-0dc2a0d25176 service nova] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1315.364037] env[69994]: DEBUG oslo_concurrency.lockutils [req-7b9ef370-ec9a-46ce-ade9-044898bac8a7 req-0d2c7151-920e-4784-81c7-0dc2a0d25176 service nova] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.364208] env[69994]: DEBUG nova.compute.manager [req-7b9ef370-ec9a-46ce-ade9-044898bac8a7 req-0d2c7151-920e-4784-81c7-0dc2a0d25176 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] No waiting events found dispatching network-vif-plugged-d3469937-e698-4fca-bf8e-450cdeb47b20 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1315.364370] env[69994]: WARNING nova.compute.manager [req-7b9ef370-ec9a-46ce-ade9-044898bac8a7 req-0d2c7151-920e-4784-81c7-0dc2a0d25176 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Received 
unexpected event network-vif-plugged-d3469937-e698-4fca-bf8e-450cdeb47b20 for instance with vm_state building and task_state spawning. [ 1315.384920] env[69994]: DEBUG nova.network.neutron [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Successfully updated port: d3469937-e698-4fca-bf8e-450cdeb47b20 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1315.887241] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "refresh_cache-e8c3effc-9430-433f-bf88-b3904cfaa31f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.887487] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "refresh_cache-e8c3effc-9430-433f-bf88-b3904cfaa31f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.887487] env[69994]: DEBUG nova.network.neutron [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1316.304988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-24841bb8-cdad-449e-8fda-985505ccae81" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.305270] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-24841bb8-cdad-449e-8fda-985505ccae81" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.305628] env[69994]: DEBUG nova.objects.instance [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'flavor' on Instance uuid 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1316.416967] env[69994]: DEBUG nova.network.neutron [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1316.541970] env[69994]: DEBUG nova.network.neutron [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Updating instance_info_cache with network_info: [{"id": "d3469937-e698-4fca-bf8e-450cdeb47b20", "address": "fa:16:3e:25:80:19", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3469937-e6", "ovs_interfaceid": "d3469937-e698-4fca-bf8e-450cdeb47b20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.891653] env[69994]: DEBUG nova.objects.instance [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'pci_requests' on Instance uuid 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.044667] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "refresh_cache-e8c3effc-9430-433f-bf88-b3904cfaa31f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1317.045149] env[69994]: DEBUG nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Instance network_info: |[{"id": "d3469937-e698-4fca-bf8e-450cdeb47b20", "address": "fa:16:3e:25:80:19", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapd3469937-e6", "ovs_interfaceid": "d3469937-e698-4fca-bf8e-450cdeb47b20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1317.045572] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:80:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089ef678-58b4-4bf0-a39d-b94b2d364291', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3469937-e698-4fca-bf8e-450cdeb47b20', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1317.053214] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1317.053415] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1317.054076] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14351f76-6e9d-4a90-bf35-769147ad8fa2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.073575] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1317.073575] env[69994]: value = "task-3242858" [ 1317.073575] env[69994]: _type = "Task" [ 1317.073575] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.081109] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242858, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.393706] env[69994]: DEBUG nova.compute.manager [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Received event network-changed-d3469937-e698-4fca-bf8e-450cdeb47b20 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1317.394029] env[69994]: DEBUG nova.compute.manager [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Refreshing instance network info cache due to event network-changed-d3469937-e698-4fca-bf8e-450cdeb47b20. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1317.394267] env[69994]: DEBUG oslo_concurrency.lockutils [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] Acquiring lock "refresh_cache-e8c3effc-9430-433f-bf88-b3904cfaa31f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.394414] env[69994]: DEBUG oslo_concurrency.lockutils [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] Acquired lock "refresh_cache-e8c3effc-9430-433f-bf88-b3904cfaa31f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.394697] env[69994]: DEBUG nova.network.neutron [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Refreshing network info cache for port d3469937-e698-4fca-bf8e-450cdeb47b20 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1317.396409] env[69994]: DEBUG nova.objects.base [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Object Instance<1735049d-a240-48fc-a360-3b00b02225b1> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1317.396642] env[69994]: DEBUG nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1317.463559] env[69994]: DEBUG nova.policy [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1317.583273] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242858, 'name': CreateVM_Task, 'duration_secs': 0.470753} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.583476] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1317.584134] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.584301] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.584630] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1317.584902] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767a67cd-a163-40d0-8fe3-5b0d592f388e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.589330] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1317.589330] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]525bc526-8d89-fe9a-4e88-db2a9b9ca5f8" [ 1317.589330] env[69994]: _type = "Task" [ 1317.589330] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.596886] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525bc526-8d89-fe9a-4e88-db2a9b9ca5f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.076943] env[69994]: DEBUG nova.network.neutron [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Updated VIF entry in instance network info cache for port d3469937-e698-4fca-bf8e-450cdeb47b20. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1318.077324] env[69994]: DEBUG nova.network.neutron [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Updating instance_info_cache with network_info: [{"id": "d3469937-e698-4fca-bf8e-450cdeb47b20", "address": "fa:16:3e:25:80:19", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3469937-e6", "ovs_interfaceid": "d3469937-e698-4fca-bf8e-450cdeb47b20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.099130] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]525bc526-8d89-fe9a-4e88-db2a9b9ca5f8, 'name': SearchDatastore_Task, 'duration_secs': 0.011175} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.099422] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.099643] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1318.099870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.100021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1318.100200] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1318.100442] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2557c55d-217b-4a97-8c84-749db5f174a9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.109492] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.109753] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1318.111026] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd9bc69c-c1ed-4e03-9316-c657e1d213e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.117124] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1318.117124] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5274efd4-b796-cade-2a85-3cc0173ecd35" [ 1318.117124] env[69994]: _type = "Task" [ 1318.117124] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.127790] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5274efd4-b796-cade-2a85-3cc0173ecd35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.579993] env[69994]: DEBUG oslo_concurrency.lockutils [req-82b56d4e-6ae1-48c8-a7ea-d6ce91c023af req-ea140d72-9e63-43c5-a119-bc15fe814cd8 service nova] Releasing lock "refresh_cache-e8c3effc-9430-433f-bf88-b3904cfaa31f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.627078] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5274efd4-b796-cade-2a85-3cc0173ecd35, 'name': SearchDatastore_Task, 'duration_secs': 0.01457} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.627811] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e11ab0a2-d5fd-44bc-a863-62aff45ab043 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.632410] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1318.632410] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b1c131-4067-a4bc-73cf-55050ecd5f03" [ 1318.632410] env[69994]: _type = "Task" [ 1318.632410] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.640671] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b1c131-4067-a4bc-73cf-55050ecd5f03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.932856] env[69994]: DEBUG nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Successfully updated port: 24841bb8-cdad-449e-8fda-985505ccae81 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1319.142278] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b1c131-4067-a4bc-73cf-55050ecd5f03, 'name': SearchDatastore_Task, 'duration_secs': 0.011261} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.142580] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1319.142771] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e8c3effc-9430-433f-bf88-b3904cfaa31f/e8c3effc-9430-433f-bf88-b3904cfaa31f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1319.143029] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97395b37-ac28-4d39-a3b6-367cbb59ec10 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.149216] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1319.149216] env[69994]: value = "task-3242859" [ 1319.149216] env[69994]: _type = "Task" [ 1319.149216] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.156173] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242859, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.420573] env[69994]: DEBUG nova.compute.manager [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-vif-plugged-24841bb8-cdad-449e-8fda-985505ccae81 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1319.420809] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.421115] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.421306] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.421512] env[69994]: DEBUG nova.compute.manager [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] No waiting events found dispatching network-vif-plugged-24841bb8-cdad-449e-8fda-985505ccae81 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1319.421716] env[69994]: WARNING nova.compute.manager [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received unexpected event network-vif-plugged-24841bb8-cdad-449e-8fda-985505ccae81 for instance with vm_state active and task_state None. [ 1319.421948] env[69994]: DEBUG nova.compute.manager [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-changed-24841bb8-cdad-449e-8fda-985505ccae81 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1319.422144] env[69994]: DEBUG nova.compute.manager [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing instance network info cache due to event network-changed-24841bb8-cdad-449e-8fda-985505ccae81. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1319.422440] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.422592] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1319.422779] env[69994]: DEBUG nova.network.neutron [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Refreshing network info cache for port 24841bb8-cdad-449e-8fda-985505ccae81 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1319.435491] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.659443] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503825} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.659697] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] e8c3effc-9430-433f-bf88-b3904cfaa31f/e8c3effc-9430-433f-bf88-b3904cfaa31f.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1319.659907] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1319.660232] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-033ffcb8-8507-4f7f-8594-7ef30eeb4993 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.667277] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1319.667277] env[69994]: value = "task-3242860" [ 1319.667277] env[69994]: _type = "Task" [ 1319.667277] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.674299] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242860, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.128530] env[69994]: DEBUG nova.network.neutron [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Added VIF to instance network info cache for port 24841bb8-cdad-449e-8fda-985505ccae81. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1320.128935] env[69994]: DEBUG nova.network.neutron [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ea752f0-3e05-4c05-8029-10dc8418968e", "address": "fa:16:3e:ac:9c:28", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ea752f0-3e", "ovs_interfaceid": "1ea752f0-3e05-4c05-8029-10dc8418968e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"24841bb8-cdad-449e-8fda-985505ccae81", "address": "fa:16:3e:e4:15:dd", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24841bb8-cd", "ovs_interfaceid": "24841bb8-cdad-449e-8fda-985505ccae81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.177015] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100945} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.177354] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1320.178042] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8b1f19-7040-4037-8ff9-15d17023c480 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.199249] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] e8c3effc-9430-433f-bf88-b3904cfaa31f/e8c3effc-9430-433f-bf88-b3904cfaa31f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1320.199474] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73f83bb2-3080-4ade-b123-2c070b44407d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.218014] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1320.218014] env[69994]: value = "task-3242861" [ 1320.218014] env[69994]: _type = "Task" [ 1320.218014] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.225058] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242861, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.632053] env[69994]: DEBUG oslo_concurrency.lockutils [req-c4e855f5-b649-44d1-8946-b69794653c2f req-33eaa991-d461-4c4c-abf0-917821925d9c service nova] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1320.632507] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1320.632682] env[69994]: DEBUG nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1320.727673] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242861, 'name': ReconfigVM_Task, 'duration_secs': 0.338701} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.727933] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Reconfigured VM instance instance-00000075 to attach disk [datastore1] e8c3effc-9430-433f-bf88-b3904cfaa31f/e8c3effc-9430-433f-bf88-b3904cfaa31f.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1320.728664] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efbb3105-202f-4472-b1ae-9b3e72c23df2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.735717] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1320.735717] env[69994]: value = "task-3242862" [ 1320.735717] env[69994]: _type = "Task" [ 1320.735717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.743164] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242862, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.168323] env[69994]: WARNING nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. ignoring it [ 1321.168580] env[69994]: WARNING nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. ignoring it [ 1321.168776] env[69994]: WARNING nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. ignoring it [ 1321.168937] env[69994]: WARNING nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] 24841bb8-cdad-449e-8fda-985505ccae81 already exists in list: port_ids containing: ['24841bb8-cdad-449e-8fda-985505ccae81']. ignoring it [ 1321.245539] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242862, 'name': Rename_Task, 'duration_secs': 0.207373} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.247897] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1321.248242] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a4b0402-55a3-49c3-b2c8-89fbc69c3dbf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.254438] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1321.254438] env[69994]: value = "task-3242863" [ 1321.254438] env[69994]: _type = "Task" [ 1321.254438] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.261815] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242863, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.550424] env[69994]: DEBUG nova.network.neutron [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ea752f0-3e05-4c05-8029-10dc8418968e", "address": "fa:16:3e:ac:9c:28", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ea752f0-3e", "ovs_interfaceid": "1ea752f0-3e05-4c05-8029-10dc8418968e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "24841bb8-cdad-449e-8fda-985505ccae81", "address": "fa:16:3e:e4:15:dd", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24841bb8-cd", "ovs_interfaceid": "24841bb8-cdad-449e-8fda-985505ccae81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.764565] env[69994]: DEBUG oslo_vmware.api [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242863, 'name': PowerOnVM_Task, 'duration_secs': 0.458263} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.764836] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1321.765050] env[69994]: INFO nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Took 6.83 seconds to spawn the instance on the hypervisor. [ 1321.765236] env[69994]: DEBUG nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1321.766026] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aae1b88-ccf2-4538-89e1-8bf40a458d95 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.053520] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1322.054220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.054383] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1322.055252] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7b2d59-e392-4e1e-b423-3e9c19aad78c {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.072968] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1322.073195] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1322.073351] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1322.073535] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1322.073681] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1322.073827] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1322.074125] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1322.074321] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1322.074497] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1322.074663] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1322.074839] env[69994]: DEBUG nova.virt.hardware [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1322.081072] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1322.081356] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-095e375b-ef0b-4cf1-8ade-c4d5705f523d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.099767] env[69994]: DEBUG oslo_vmware.api [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1322.099767] env[69994]: value = "task-3242864" [ 1322.099767] env[69994]: _type = "Task" [ 1322.099767] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.106942] env[69994]: DEBUG oslo_vmware.api [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242864, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.283014] env[69994]: INFO nova.compute.manager [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Took 11.57 seconds to build instance. [ 1322.609540] env[69994]: DEBUG oslo_vmware.api [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242864, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.785363] env[69994]: DEBUG oslo_concurrency.lockutils [None req-be37a002-1444-4d45-a453-7add573f044e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.084s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.876261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "e8c3effc-9430-433f-bf88-b3904cfaa31f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.876526] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.876739] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "e8c3effc-9430-433f-bf88-b3904cfaa31f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.876925] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.877110] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.879321] env[69994]: INFO nova.compute.manager [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Terminating instance [ 1323.109771] env[69994]: DEBUG oslo_vmware.api [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242864, 'name': ReconfigVM_Task, 'duration_secs': 0.59269} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.109991] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.110204] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1323.384093] env[69994]: DEBUG nova.compute.manager [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1323.384387] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1323.385607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8d3eab-8114-4690-880e-9175755d734d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.393214] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.393438] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54b72b16-2da1-4387-97e4-2f245a0ee292 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.399074] env[69994]: DEBUG oslo_vmware.api [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1323.399074] env[69994]: value = "task-3242865" [ 1323.399074] env[69994]: _type = "Task" [ 1323.399074] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.406887] env[69994]: DEBUG oslo_vmware.api [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242865, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.614782] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7bfa30b6-b2a3-47be-b375-f244eab090bd tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-24841bb8-cdad-449e-8fda-985505ccae81" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.309s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.908732] env[69994]: DEBUG oslo_vmware.api [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242865, 'name': PowerOffVM_Task, 'duration_secs': 0.195252} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.908992] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1323.909175] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1323.909413] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dd61bbf-581e-4e14-80ef-7e5ac4cd7307 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.977416] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1323.977652] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1323.977838] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleting the datastore file [datastore1] e8c3effc-9430-433f-bf88-b3904cfaa31f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1323.978149] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1af740d-da53-4cc9-80d0-e742e5a98259 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.984075] env[69994]: DEBUG oslo_vmware.api [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1323.984075] env[69994]: value = "task-3242867" [ 
1323.984075] env[69994]: _type = "Task" [ 1323.984075] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.991521] env[69994]: DEBUG oslo_vmware.api [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242867, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.494793] env[69994]: DEBUG oslo_vmware.api [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14328} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.495151] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1324.495265] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1324.495397] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1324.495571] env[69994]: INFO nova.compute.manager [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1324.495813] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
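Annotation: by this point the log has walked through the whole hypervisor-side teardown of e8c3effc-9430-433f-bf88-b3904cfaa31f: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on its [datastore1] directory, and then the hand-off to network deallocation. The ordering, reduced to a schematic with injected callables (the helper names are placeholders, not the vmops functions):

    def destroy_instance(power_off, unregister, delete_datastore_dir, deallocate_network):
        # Each callable is expected to block until its vCenter task (or Neutron
        # call) finishes, matching the completed tasks in the log:
        # PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task ->
        # deallocate_for_instance().
        power_off()              # stop the guest first
        unregister()             # remove the VM from the vCenter inventory
        delete_datastore_dir()   # delete the instance directory from the datastore
        deallocate_network()     # only then release the Neutron ports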
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1324.496000] env[69994]: DEBUG nova.compute.manager [-] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1324.496111] env[69994]: DEBUG nova.network.neutron [-] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1324.835492] env[69994]: DEBUG nova.compute.manager [req-7eb7270f-0b11-489b-beec-a04a21cc0c79 req-5370bd3e-3af5-4231-8ae1-ea48fb1e1f9c service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Received event network-vif-deleted-d3469937-e698-4fca-bf8e-450cdeb47b20 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1324.835698] env[69994]: INFO nova.compute.manager [req-7eb7270f-0b11-489b-beec-a04a21cc0c79 req-5370bd3e-3af5-4231-8ae1-ea48fb1e1f9c service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Neutron deleted interface d3469937-e698-4fca-bf8e-450cdeb47b20; detaching it from the instance and deleting it from the info cache [ 1324.835875] env[69994]: DEBUG nova.network.neutron [req-7eb7270f-0b11-489b-beec-a04a21cc0c79 req-5370bd3e-3af5-4231-8ae1-ea48fb1e1f9c service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.146358] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.146702] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-1ea752f0-3e05-4c05-8029-10dc8418968e" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.146933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-1ea752f0-3e05-4c05-8029-10dc8418968e" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.148265] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.148433] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
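Annotation: the "Running periodic task ComputeManager._poll_rescued_instances / _reclaim_queued_deletes" lines come from the oslo.service periodic-task machinery: methods decorated as periodic tasks are collected and run by the manager at their configured spacing, and a task can still decide to do nothing (here because reclaim_instance_interval is not positive). A rough sketch of that pattern, assuming the documented oslo.service decorator API; the option registration below exists only for this toy example.

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class ToyManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _reclaim_queued_deletes(self, context):
            if CONF.reclaim_instance_interval <= 0:
                # Same guard as "CONF.reclaim_instance_interval <= 0, skipping..." above.
                return

    mgr = ToyManager()
    mgr.run_periodic_tasks(context=None)   # the service loop calls this repeatedly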
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1325.313325] env[69994]: DEBUG nova.network.neutron [-] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.337947] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72b9f528-5d11-4b61-bc7c-76bd0591c8a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.347844] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b865dea2-c2ea-4549-8020-c5a711411f74 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.378497] env[69994]: DEBUG nova.compute.manager [req-7eb7270f-0b11-489b-beec-a04a21cc0c79 req-5370bd3e-3af5-4231-8ae1-ea48fb1e1f9c service nova] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Detach interface failed, port_id=d3469937-e698-4fca-bf8e-450cdeb47b20, reason: Instance e8c3effc-9430-433f-bf88-b3904cfaa31f could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1325.650192] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.650593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1325.651338] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8088f681-26fc-454c-82ef-2b0f595b03d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.669849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ce6970-e7c9-42d1-aa44-341ba7d413ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.697334] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1325.697590] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-737d11bc-6a6e-4447-8cc6-77bcb08808e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.714961] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1325.714961] env[69994]: value = "task-3242868" [ 1325.714961] env[69994]: _type 
= "Task" [ 1325.714961] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.722528] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.816292] env[69994]: INFO nova.compute.manager [-] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Took 1.32 seconds to deallocate network for instance. [ 1326.145346] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1326.145571] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1326.225517] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.322214] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.322557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.322931] env[69994]: DEBUG nova.objects.instance [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'resources' on Instance uuid e8c3effc-9430-433f-bf88-b3904cfaa31f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1326.648117] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.725414] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.931132] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5009b2ae-de78-40d0-8df1-d7dab93cd5e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.938414] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdea32a-a695-43b9-b595-16db952ea7a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.967983] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b974f13-b50a-48c2-b03e-9e6a209f0f20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.974538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c5bf6d-45e7-48c1-a8d7-e4612eea36c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.987641] env[69994]: DEBUG nova.compute.provider_tree [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.226639] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.491191] env[69994]: DEBUG nova.scheduler.client.report [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1327.726603] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. 
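Annotation: the inventory dict repeated in these "Inventory has not changed for provider ..." lines is what Placement uses to bound scheduling: for each resource class, roughly (total - reserved) * allocation_ratio is the schedulable capacity, and max_unit caps what a single instance may request. Plugging in the numbers from the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 120},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: schedulable=%g, per-instance max_unit=%d" % (rc, capacity, inv['max_unit']))
    # VCPU: schedulable=192, per-instance max_unit=16
    # MEMORY_MB: schedulable=196078, per-instance max_unit=65530
    # DISK_GB: schedulable=400, per-instance max_unit=120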
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.997051] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.998732] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.351s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.998921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.999099] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1327.999938] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b26cff-94b4-4573-8a53-bdc0506886cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.009340] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e29ecb1-09c5-46e2-aefa-d0f2e1cdf644 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.014224] env[69994]: INFO nova.scheduler.client.report [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted allocations for instance e8c3effc-9430-433f-bf88-b3904cfaa31f [ 1328.025421] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e71027-6198-4ffc-9955-bc58b61adc7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.031662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a081242-faa4-4208-b2f6-d39a3a6c8187 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.060807] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180072MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1328.061036] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1328.061831] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.228258] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.532106] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f7f28317-9c38-483f-b6fa-241d64d789ca tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "e8c3effc-9430-433f-bf88-b3904cfaa31f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.655s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.729272] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.087960] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.088133] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 6b73608e-b62f-4292-870c-51f1c686e569 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.088261] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 1735049d-a240-48fc-a360-3b00b02225b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.088431] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 73288b0c-7e85-48cd-9ea1-d08a31a81c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.088492] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 217bd31d-f705-4aa7-a8a7-d79e407b7c7b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.088605] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 71ee4730-f0e5-4c71-8053-be9e73b702a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.088803] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 7e92935f-fc1f-4893-8f69-4b97e4729a7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.088918] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1329.089127] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1329.169826] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58cae44-6786-4354-8b47-a047709875b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.178953] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72aaf83-9571-4f1d-9350-7a8146faaca5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.209053] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e611499-91c0-48b6-9c91-124a2ff0a891 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.216024] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42290ef-5228-472e-83eb-44cffb3f2fc5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.230468] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.236351] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. 
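Annotation: the "Final resource view" numbers are consistent with the per-instance allocations listed just above plus the reserved host memory from the inventory: six instances at 192 MB and one at 256 MB give 1408 MB, and adding the 512 MB reservation yields the reported used_ram=1920MB; the six 1 GB root disks and seven single-VCPU instances likewise match used_disk=6GB and used_vcpus=7. As a quick cross-check:

    instance_mem_mb = [192, 192, 192, 192, 256, 192, 192]   # the seven instances listed above
    reserved_host_memory_mb = 512                            # 'reserved' in the MEMORY_MB inventory
    instance_disk_gb = [1, 1, 1, 1, 1, 1]                    # 217bd31d-... reports no DISK_GB

    assert sum(instance_mem_mb) + reserved_host_memory_mb == 1920   # used_ram=1920MB
    assert sum(instance_disk_gb) == 6                                # used_disk=6GB
    assert len(instance_mem_mb) == 7                                 # used_vcpus=7 (1 VCPU each)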
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.729784] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.733689] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1329.888481] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "4f002725-3f15-4d10-a7ee-07132faf6266" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.888818] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "4f002725-3f15-4d10-a7ee-07132faf6266" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.231240] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.238055] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1330.238191] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.177s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.391077] env[69994]: DEBUG nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1330.731262] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.913200] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.913529] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.915081] env[69994]: INFO nova.compute.claims [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1331.232421] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.238908] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1331.239169] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1331.239418] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1331.733155] env[69994]: DEBUG oslo_vmware.api [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242868, 'name': ReconfigVM_Task, 'duration_secs': 5.767316} completed successfully. 
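Annotation: "Claim successful on node domain-c8...." records the resource-tracker step that runs before the build proceeds: under the compute_resources lock, the requested resources are checked against free capacity and recorded, so the build that follows cannot overcommit the node. A toy version of that check-and-reserve step (threading.Lock stands in for the named compute_resources lock; this is not Nova's ResourceTracker):

    import threading

    class ToyResourceTracker:
        def __init__(self, free):
            self._lock = threading.Lock()          # stands in for the "compute_resources" lock
            self.free = dict(free)

        def instance_claim(self, request):
            with self._lock:
                if any(request[rc] > self.free.get(rc, 0) for rc in request):
                    raise RuntimeError("insufficient resources on this node")
                for rc, amount in request.items():
                    self.free[rc] -= amount        # reserve before the build continues

    rt = ToyResourceTracker({'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400})
    rt.instance_claim({'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1})   # like the claim logged above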
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.733593] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1331.733593] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1331.959128] env[69994]: DEBUG nova.compute.manager [req-1980a82f-c7c2-4e1c-aea7-9023d85ef819 req-762b70e2-633b-449a-9118-97325d9f45a6 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-vif-deleted-1ea752f0-3e05-4c05-8029-10dc8418968e {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1331.959337] env[69994]: INFO nova.compute.manager [req-1980a82f-c7c2-4e1c-aea7-9023d85ef819 req-762b70e2-633b-449a-9118-97325d9f45a6 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Neutron deleted interface 1ea752f0-3e05-4c05-8029-10dc8418968e; detaching it from the instance and deleting it from the info cache [ 1331.959636] env[69994]: DEBUG nova.network.neutron [req-1980a82f-c7c2-4e1c-aea7-9023d85ef819 req-762b70e2-633b-449a-9118-97325d9f45a6 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "24841bb8-cdad-449e-8fda-985505ccae81", "address": "fa:16:3e:e4:15:dd", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24841bb8-cd", "ovs_interfaceid": "24841bb8-cdad-449e-8fda-985505ccae81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.032049] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dd2f04-8215-4ca3-b4c0-b554cf21f874 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.039690] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7c50f7-0bd3-41ef-b5e4-295c5d2b7c42 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.070531] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d9b269-45ca-4c0d-a5a0-f8482e3c378d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.077822] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c016af-4b89-4c37-a8dd-458574ce84b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.090865] env[69994]: DEBUG nova.compute.provider_tree [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1332.145469] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.462055] env[69994]: DEBUG oslo_concurrency.lockutils [req-1980a82f-c7c2-4e1c-aea7-9023d85ef819 req-762b70e2-633b-449a-9118-97325d9f45a6 service nova] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.462307] env[69994]: DEBUG oslo_concurrency.lockutils [req-1980a82f-c7c2-4e1c-aea7-9023d85ef819 req-762b70e2-633b-449a-9118-97325d9f45a6 service nova] Acquired lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1332.463598] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0ad4c5-5f4c-4876-b29e-093fc1b6ded4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.482564] env[69994]: DEBUG oslo_concurrency.lockutils [req-1980a82f-c7c2-4e1c-aea7-9023d85ef819 
req-762b70e2-633b-449a-9118-97325d9f45a6 service nova] Releasing lock "1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1332.482806] env[69994]: WARNING nova.compute.manager [req-1980a82f-c7c2-4e1c-aea7-9023d85ef819 req-762b70e2-633b-449a-9118-97325d9f45a6 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Detach interface failed, port_id=1ea752f0-3e05-4c05-8029-10dc8418968e, reason: No device with interface-id 1ea752f0-3e05-4c05-8029-10dc8418968e exists on VM: nova.exception.NotFound: No device with interface-id 1ea752f0-3e05-4c05-8029-10dc8418968e exists on VM [ 1332.594328] env[69994]: DEBUG nova.scheduler.client.report [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1332.942474] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1332.942779] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1332.942923] env[69994]: DEBUG nova.network.neutron [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1333.007852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1333.008129] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "1735049d-a240-48fc-a360-3b00b02225b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.008348] env[69994]: DEBUG oslo_concurrency.lockutils [None 
req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "1735049d-a240-48fc-a360-3b00b02225b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1333.008532] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1333.008697] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "1735049d-a240-48fc-a360-3b00b02225b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.010701] env[69994]: INFO nova.compute.manager [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Terminating instance [ 1333.098887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.185s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.099486] env[69994]: DEBUG nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1333.514638] env[69994]: DEBUG nova.compute.manager [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Start destroying the instance on the hypervisor. 
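Annotation: the instance_info_cache dumps earlier in this section (for instance 1735049d-a240-48fc-a360-3b00b02225b1) use Nova's network_info model: a list of VIF dicts, each carrying the port id, MAC address, the network with its subnets, fixed and floating IPs, and binding details such as devname and ovs_interfaceid. A short walk over a trimmed, hand-copied subset of the cached data shown above:

    network_info = [{
        "id": "657bb865-1c59-4abc-b02a-bb91154c3cd9",
        "address": "fa:16:3e:9c:01:ca",
        "devname": "tap657bb865-1c",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12",
                     "floating_ips": [{"address": "10.180.180.165"}]}],
        }]},
    }]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], vif["address"], ip["address"], floating)
    # 657bb865-... fa:16:3e:9c:01:ca 192.168.128.12 ['10.180.180.165']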
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1333.514859] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1333.515793] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c33d18-c3d7-4c85-94c9-47f8cf8050ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.525778] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1333.526046] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f1dfe12-dd9d-456f-81ba-36118a3d137d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.531651] env[69994]: DEBUG oslo_vmware.api [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1333.531651] env[69994]: value = "task-3242869" [ 1333.531651] env[69994]: _type = "Task" [ 1333.531651] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.539461] env[69994]: DEBUG oslo_vmware.api [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242869, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.604028] env[69994]: DEBUG nova.compute.utils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1333.605500] env[69994]: DEBUG nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1333.605671] env[69994]: DEBUG nova.network.neutron [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1333.642869] env[69994]: DEBUG nova.policy [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '432370da6f1840db8f93b613ca52e31d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42ee300d6f33459da1deb82b1b14cf74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1333.707941] env[69994]: INFO nova.network.neutron [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Port 24841bb8-cdad-449e-8fda-985505ccae81 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1333.708352] env[69994]: DEBUG nova.network.neutron [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [{"id": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "address": "fa:16:3e:9c:01:ca", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap657bb865-1c", "ovs_interfaceid": "657bb865-1c59-4abc-b02a-bb91154c3cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.923573] env[69994]: DEBUG nova.network.neutron [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Successfully created port: f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f {{(pid=69994) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1333.983184] env[69994]: DEBUG nova.compute.manager [req-0453e66f-4b45-4d21-bc48-a8bc4165671c req-ff5f093b-93dc-4e17-8860-58fc2dde909a service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-vif-deleted-24841bb8-cdad-449e-8fda-985505ccae81 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1334.042652] env[69994]: DEBUG oslo_vmware.api [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242869, 'name': PowerOffVM_Task, 'duration_secs': 0.21151} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.042896] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.043168] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1334.043533] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67ebf70e-8ec8-4fe9-a3fe-7fd657493374 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.108512] env[69994]: DEBUG nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1334.150268] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1334.150486] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1334.150671] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleting the datastore file [datastore2] 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1334.150934] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f331fbf7-9f53-4966-a906-995c8dc5b056 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.157992] env[69994]: DEBUG oslo_vmware.api [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1334.157992] env[69994]: value = "task-3242871" [ 1334.157992] env[69994]: _type = "Task" [ 1334.157992] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.166254] env[69994]: DEBUG oslo_vmware.api [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.211044] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-1735049d-a240-48fc-a360-3b00b02225b1" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1334.669069] env[69994]: DEBUG oslo_vmware.api [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173953} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.669069] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1334.669069] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1334.669069] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1334.669440] env[69994]: INFO nova.compute.manager [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1334.669440] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1334.669617] env[69994]: DEBUG nova.compute.manager [-] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1334.669694] env[69994]: DEBUG nova.network.neutron [-] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1334.714453] env[69994]: DEBUG oslo_concurrency.lockutils [None req-32a67349-52de-4e2e-92e6-0959e00239f3 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-1735049d-a240-48fc-a360-3b00b02225b1-1ea752f0-3e05-4c05-8029-10dc8418968e" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.567s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.722300] env[69994]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 24841bb8-cdad-449e-8fda-985505ccae81 could not be found.", "detail": ""}} {{(pid=69994) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1334.722747] env[69994]: DEBUG nova.network.neutron [-] Unable to show port 24841bb8-cdad-449e-8fda-985505ccae81 as it no longer exists. 
{{(pid=69994) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1335.118600] env[69994]: DEBUG nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1335.152507] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1335.152762] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1335.153018] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1335.153324] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1335.153551] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1335.153786] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1335.154136] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1335.154445] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa 
tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1335.154716] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1335.154974] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1335.155256] env[69994]: DEBUG nova.virt.hardware [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1335.156941] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411cd06b-6e50-4615-be2b-94ca45c58550 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.168254] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c584c19b-2b12-4390-8ab4-bb6288f30730 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.324616] env[69994]: DEBUG nova.compute.manager [req-06a39e1b-7e9e-49dd-b0fd-02a0fbe03cd6 req-487c5677-d3aa-4cc8-9789-6c08319e9b94 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Received event network-vif-plugged-f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1335.324855] env[69994]: DEBUG oslo_concurrency.lockutils [req-06a39e1b-7e9e-49dd-b0fd-02a0fbe03cd6 req-487c5677-d3aa-4cc8-9789-6c08319e9b94 service nova] Acquiring lock "4f002725-3f15-4d10-a7ee-07132faf6266-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.325097] env[69994]: DEBUG oslo_concurrency.lockutils [req-06a39e1b-7e9e-49dd-b0fd-02a0fbe03cd6 req-487c5677-d3aa-4cc8-9789-6c08319e9b94 service nova] Lock "4f002725-3f15-4d10-a7ee-07132faf6266-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.325277] env[69994]: DEBUG oslo_concurrency.lockutils [req-06a39e1b-7e9e-49dd-b0fd-02a0fbe03cd6 req-487c5677-d3aa-4cc8-9789-6c08319e9b94 service nova] Lock "4f002725-3f15-4d10-a7ee-07132faf6266-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.325526] env[69994]: DEBUG nova.compute.manager [req-06a39e1b-7e9e-49dd-b0fd-02a0fbe03cd6 req-487c5677-d3aa-4cc8-9789-6c08319e9b94 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] No waiting events found dispatching 
network-vif-plugged-f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1335.325600] env[69994]: WARNING nova.compute.manager [req-06a39e1b-7e9e-49dd-b0fd-02a0fbe03cd6 req-487c5677-d3aa-4cc8-9789-6c08319e9b94 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Received unexpected event network-vif-plugged-f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f for instance with vm_state building and task_state spawning. [ 1335.403185] env[69994]: DEBUG nova.network.neutron [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Successfully updated port: f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1335.624702] env[69994]: DEBUG nova.network.neutron [-] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.907227] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "refresh_cache-4f002725-3f15-4d10-a7ee-07132faf6266" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.907365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "refresh_cache-4f002725-3f15-4d10-a7ee-07132faf6266" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1335.907570] env[69994]: DEBUG nova.network.neutron [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1336.007830] env[69994]: DEBUG nova.compute.manager [req-69f8a6f0-330c-44b5-a4ef-ba3b12ea44df req-a93f8ce8-bf02-458c-ba40-5ae02dd9edc0 service nova] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Received event network-vif-deleted-657bb865-1c59-4abc-b02a-bb91154c3cd9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1336.126726] env[69994]: INFO nova.compute.manager [-] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Took 1.46 seconds to deallocate network for instance. [ 1336.440757] env[69994]: DEBUG nova.network.neutron [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1336.566721] env[69994]: DEBUG nova.network.neutron [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Updating instance_info_cache with network_info: [{"id": "f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f", "address": "fa:16:3e:00:0c:a3", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2ed70aa-9d", "ovs_interfaceid": "f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.633313] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.633579] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.633805] env[69994]: DEBUG nova.objects.instance [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'resources' on Instance uuid 1735049d-a240-48fc-a360-3b00b02225b1 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.070099] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "refresh_cache-4f002725-3f15-4d10-a7ee-07132faf6266" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.070099] env[69994]: DEBUG nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Instance network_info: |[{"id": "f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f", "address": "fa:16:3e:00:0c:a3", "network": 
{"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2ed70aa-9d", "ovs_interfaceid": "f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1337.070099] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:0c:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089ef678-58b4-4bf0-a39d-b94b2d364291', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1337.078076] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1337.078173] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1337.078736] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fefe3c45-6a75-429c-9dba-0d8b96cb6f8c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.099775] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1337.099775] env[69994]: value = "task-3242872" [ 1337.099775] env[69994]: _type = "Task" [ 1337.099775] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.109130] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242872, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.247440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6877767-66ee-47d7-9f2d-9b822af53be4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.254946] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394cbb5d-b3fa-429e-b679-d384683586f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.286193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfe3e4b-258f-4cbb-a7c3-988d8c4bc48a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.293292] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12be491d-b54f-4279-a52c-33f99f43aa13 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.306408] env[69994]: DEBUG nova.compute.provider_tree [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.353681] env[69994]: DEBUG nova.compute.manager [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Received event network-changed-f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1337.353844] env[69994]: DEBUG nova.compute.manager [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Refreshing instance network info cache due to event network-changed-f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1337.354148] env[69994]: DEBUG oslo_concurrency.lockutils [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] Acquiring lock "refresh_cache-4f002725-3f15-4d10-a7ee-07132faf6266" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.354291] env[69994]: DEBUG oslo_concurrency.lockutils [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] Acquired lock "refresh_cache-4f002725-3f15-4d10-a7ee-07132faf6266" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.354493] env[69994]: DEBUG nova.network.neutron [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Refreshing network info cache for port f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1337.538585] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.538870] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.539098] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.539290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.539468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.541819] env[69994]: INFO nova.compute.manager [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 
tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Terminating instance [ 1337.610577] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242872, 'name': CreateVM_Task, 'duration_secs': 0.315603} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.610758] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1337.611453] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.611620] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.611954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1337.612231] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9739a92d-811f-4910-836a-5b3bc6b31c26 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.616844] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1337.616844] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521737e0-19f8-6344-b8c5-26844d0eb983" [ 1337.616844] env[69994]: _type = "Task" [ 1337.616844] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.624412] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521737e0-19f8-6344-b8c5-26844d0eb983, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.810080] env[69994]: DEBUG nova.scheduler.client.report [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1338.034672] env[69994]: DEBUG nova.network.neutron [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Updated VIF entry in instance network info cache for port f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1338.035063] env[69994]: DEBUG nova.network.neutron [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Updating instance_info_cache with network_info: [{"id": "f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f", "address": "fa:16:3e:00:0c:a3", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2ed70aa-9d", "ovs_interfaceid": "f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.045470] env[69994]: DEBUG nova.compute.manager [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1338.045698] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.045957] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5650f8fb-074a-489c-9170-5952c5a7dd99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.053963] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1338.053963] env[69994]: value = "task-3242873" [ 1338.053963] env[69994]: _type = "Task" [ 1338.053963] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.062405] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.127770] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521737e0-19f8-6344-b8c5-26844d0eb983, 'name': SearchDatastore_Task, 'duration_secs': 0.010976} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.128017] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.128256] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1338.128496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.128642] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1338.128817] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1338.129083] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0881711-19b7-4796-9e74-17fce764e622 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.137261] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1338.137440] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1338.138182] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de077357-32b8-43ee-a13f-7456dacc9060 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.142973] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1338.142973] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5252576e-c4d5-bdcc-11f9-ffa7964fb34e" [ 1338.142973] env[69994]: _type = "Task" [ 1338.142973] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.150530] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5252576e-c4d5-bdcc-11f9-ffa7964fb34e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.200843] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1338.201183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1338.201419] env[69994]: INFO nova.compute.manager [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Shelving [ 1338.316263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.682s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.338832] env[69994]: INFO nova.scheduler.client.report [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted allocations for instance 1735049d-a240-48fc-a360-3b00b02225b1 [ 1338.538082] env[69994]: DEBUG oslo_concurrency.lockutils [req-b7f11c99-14b0-4e6a-a944-7ef90f1a98aa req-40ddc0ed-a9bd-43cb-92dc-8da79eb99358 service nova] Releasing lock "refresh_cache-4f002725-3f15-4d10-a7ee-07132faf6266" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.563243] 
env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242873, 'name': PowerOffVM_Task, 'duration_secs': 0.200629} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.563572] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1338.563721] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1338.563954] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648050', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'name': 'volume-c725763f-b1d7-421a-95e2-cd5644ee630e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '217bd31d-f705-4aa7-a8a7-d79e407b7c7b', 'attached_at': '2025-04-03T08:50:37.000000', 'detached_at': '', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'serial': 'c725763f-b1d7-421a-95e2-cd5644ee630e'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1338.564714] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033d8549-873d-4bb5-8cce-1d0be9660e83 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.582279] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35d878e-efd9-49e4-bb76-8eb373eb5666 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.588708] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1804f2b2-30b2-4329-991d-f72cb25d9342 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.605607] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f106a643-9993-4ff0-ab23-68a6eb0810aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.620777] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] The volume has not been displaced from its original location: [datastore1] volume-c725763f-b1d7-421a-95e2-cd5644ee630e/volume-c725763f-b1d7-421a-95e2-cd5644ee630e.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1338.625912] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1338.626180] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-321f216a-bd85-460d-aa55-ad19005b406f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.642955] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1338.642955] env[69994]: value = "task-3242874" [ 1338.642955] env[69994]: _type = "Task" [ 1338.642955] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.653608] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5252576e-c4d5-bdcc-11f9-ffa7964fb34e, 'name': SearchDatastore_Task, 'duration_secs': 0.008311} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.657311] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242874, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.657523] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4daa8e07-254c-4203-8a34-9dc8dff225c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.661982] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1338.661982] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b566b8-7a1d-2470-adeb-7a1fc41e4e86" [ 1338.661982] env[69994]: _type = "Task" [ 1338.661982] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.669292] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b566b8-7a1d-2470-adeb-7a1fc41e4e86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.847838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f764fec5-1cce-4443-b567-76e670bf1756 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "1735049d-a240-48fc-a360-3b00b02225b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.840s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.153155] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242874, 'name': ReconfigVM_Task, 'duration_secs': 0.151066} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.154912] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1339.157981] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f96b1214-1617-4741-b9ab-e067abcf90fd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.176038] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b566b8-7a1d-2470-adeb-7a1fc41e4e86, 'name': SearchDatastore_Task, 'duration_secs': 0.010304} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.177307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1339.177603] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4f002725-3f15-4d10-a7ee-07132faf6266/4f002725-3f15-4d10-a7ee-07132faf6266.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1339.178012] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1339.178012] env[69994]: value = "task-3242875" [ 1339.178012] env[69994]: _type = "Task" [ 1339.178012] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.178198] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45c85498-3e35-4187-88c3-d10496127134 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.189510] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242875, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.190104] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1339.190104] env[69994]: value = "task-3242876" [ 1339.190104] env[69994]: _type = "Task" [ 1339.190104] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.197227] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242876, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.210837] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1339.211118] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c76d0170-ed6d-4436-a55f-1cdbb43c3603 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.216225] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1339.216225] env[69994]: value = "task-3242877" [ 1339.216225] env[69994]: _type = "Task" [ 1339.216225] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.224461] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242877, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.690224] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242875, 'name': ReconfigVM_Task, 'duration_secs': 0.173864} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.690975] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648050', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'name': 'volume-c725763f-b1d7-421a-95e2-cd5644ee630e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '217bd31d-f705-4aa7-a8a7-d79e407b7c7b', 'attached_at': '2025-04-03T08:50:37.000000', 'detached_at': '', 'volume_id': 'c725763f-b1d7-421a-95e2-cd5644ee630e', 'serial': 'c725763f-b1d7-421a-95e2-cd5644ee630e'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1339.691466] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1339.695458] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1b6579-5148-490f-8480-61a043600c58 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.703482] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475403} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.705653] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 4f002725-3f15-4d10-a7ee-07132faf6266/4f002725-3f15-4d10-a7ee-07132faf6266.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1339.705871] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1339.706154] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1339.706412] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b66d3e75-f69c-46da-ada2-78032a9aaebd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.708096] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1c9b280-1d5f-4906-9f79-20e2d7f6ad18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.714276] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1339.714276] env[69994]: value = "task-3242878" [ 1339.714276] env[69994]: _type = "Task" [ 1339.714276] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.725217] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.727745] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242877, 'name': PowerOffVM_Task, 'duration_secs': 0.210978} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.727989] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1339.728769] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60e9981-7e17-4535-a574-0ccab5ab8587 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.748065] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77258a1-f065-433b-b627-1069b08cd575 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.781252] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1339.781472] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1339.781658] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleting the datastore file [datastore1] 217bd31d-f705-4aa7-a8a7-d79e407b7c7b {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1339.781927] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a095014-96f0-4e32-a791-60df8767cc3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.788457] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1339.788457] env[69994]: value = "task-3242880" [ 1339.788457] env[69994]: _type = "Task" [ 1339.788457] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.796119] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242880, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.224175] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069709} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.224514] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1340.225441] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b110743-bcca-406a-ac3d-f1122e70b92d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.251012] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 4f002725-3f15-4d10-a7ee-07132faf6266/4f002725-3f15-4d10-a7ee-07132faf6266.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.251012] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-188cd6a6-d134-467f-abf1-5f183088b989 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.263225] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Creating Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1340.263666] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5f592867-737a-40c0-962d-93fcb64e0271 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.271577] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1340.271577] env[69994]: value = "task-3242882" [ 1340.271577] env[69994]: _type = "Task" [ 1340.271577] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.274161] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1340.274161] env[69994]: value = "task-3242881" [ 1340.274161] env[69994]: _type = "Task" [ 1340.274161] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.288067] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242882, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.291712] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242881, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.299209] env[69994]: DEBUG oslo_vmware.api [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319124} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.299459] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.299640] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1340.299814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1340.299986] env[69994]: INFO nova.compute.manager [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Took 2.25 seconds to destroy the instance on the hypervisor. [ 1340.300254] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1340.300447] env[69994]: DEBUG nova.compute.manager [-] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1340.300543] env[69994]: DEBUG nova.network.neutron [-] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1340.738603] env[69994]: DEBUG nova.compute.manager [req-e96bfae0-cdf2-4eea-802b-3b29af2105ab req-5df8a6da-4e98-4d7f-b7d9-bf4a1b95a70e service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Received event network-vif-deleted-068d9b2b-b272-416b-8986-4baa4e3c1270 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1340.738865] env[69994]: INFO nova.compute.manager [req-e96bfae0-cdf2-4eea-802b-3b29af2105ab req-5df8a6da-4e98-4d7f-b7d9-bf4a1b95a70e service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Neutron deleted interface 068d9b2b-b272-416b-8986-4baa4e3c1270; detaching it from the instance and deleting it from the info cache [ 1340.739078] env[69994]: DEBUG nova.network.neutron [req-e96bfae0-cdf2-4eea-802b-3b29af2105ab req-5df8a6da-4e98-4d7f-b7d9-bf4a1b95a70e service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.785494] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242882, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.789891] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242881, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.074466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "902200b2-f2ca-4979-961a-ec046d22d05c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.074709] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "902200b2-f2ca-4979-961a-ec046d22d05c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.216091] env[69994]: DEBUG nova.network.neutron [-] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.241656] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70069820-220b-4d8e-a99c-11caa80d5135 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.251621] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21798b8a-9a5c-4e39-a1d6-2808e9165e99 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.279397] env[69994]: DEBUG nova.compute.manager [req-e96bfae0-cdf2-4eea-802b-3b29af2105ab req-5df8a6da-4e98-4d7f-b7d9-bf4a1b95a70e service nova] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Detach interface failed, port_id=068d9b2b-b272-416b-8986-4baa4e3c1270, reason: Instance 217bd31d-f705-4aa7-a8a7-d79e407b7c7b could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1341.289692] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242882, 'name': ReconfigVM_Task, 'duration_secs': 0.737602} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.293056] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 4f002725-3f15-4d10-a7ee-07132faf6266/4f002725-3f15-4d10-a7ee-07132faf6266.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.293056] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242881, 'name': CreateSnapshot_Task, 'duration_secs': 0.895144} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.293305] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57e68132-7e65-42b0-90e7-c7d9e59ad4cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.294673] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Created Snapshot of the VM instance {{(pid=69994) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1341.295635] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32defe0e-0d83-439e-bb1f-b9f1b92c80f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.305705] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1341.305705] env[69994]: value = "task-3242883" [ 1341.305705] env[69994]: _type = "Task" [ 1341.305705] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.313386] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242883, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.577095] env[69994]: DEBUG nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1341.719122] env[69994]: INFO nova.compute.manager [-] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Took 1.42 seconds to deallocate network for instance. [ 1341.813303] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Creating linked-clone VM from snapshot {{(pid=69994) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1341.813665] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-94ac7976-ec4c-43fe-b89c-9b40c1dd20e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.825101] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242883, 'name': Rename_Task, 'duration_secs': 0.166672} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.826239] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1341.826534] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1341.826534] env[69994]: value = "task-3242884" [ 1341.826534] env[69994]: _type = "Task" [ 1341.826534] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.826720] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb387504-fad4-4ca3-9cf3-25706281d921 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.837388] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242884, 'name': CloneVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.838396] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1341.838396] env[69994]: value = "task-3242885" [ 1341.838396] env[69994]: _type = "Task" [ 1341.838396] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.844964] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242885, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.098594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.098878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.100414] env[69994]: INFO nova.compute.claims [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.265765] env[69994]: INFO nova.compute.manager [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Took 0.55 seconds to detach 1 volumes for instance. [ 1342.268632] env[69994]: DEBUG nova.compute.manager [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Deleting volume: c725763f-b1d7-421a-95e2-cd5644ee630e {{(pid=69994) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1342.338388] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242884, 'name': CloneVM_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.347961] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242885, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.807902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.839539] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242884, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.847447] env[69994]: DEBUG oslo_vmware.api [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242885, 'name': PowerOnVM_Task, 'duration_secs': 0.665958} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.847699] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.847901] env[69994]: INFO nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Took 7.73 seconds to spawn the instance on the hypervisor. [ 1342.848091] env[69994]: DEBUG nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1342.848877] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d01a0a9-a47e-45b2-8c33-1fe2c5e76e96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.209175] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f309d443-1278-4e89-b3aa-4f5516822774 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.217133] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e2be38-a5ac-4c10-b504-41c1e330c95c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.248204] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21994c90-ae62-4069-8e9f-f3388b39b228 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.255020] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a670c4-c6d7-4a5a-a54a-f89d2aa65686 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.267781] env[69994]: DEBUG nova.compute.provider_tree [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1343.339401] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242884, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.366599] env[69994]: INFO nova.compute.manager [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Took 12.47 seconds to build instance. [ 1343.770981] env[69994]: DEBUG nova.scheduler.client.report [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1343.839835] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242884, 'name': CloneVM_Task, 'duration_secs': 1.671639} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.840154] env[69994]: INFO nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Created linked-clone VM from snapshot [ 1343.840827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7d4e48-fadb-48db-bf53-067aaf085db2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.847977] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Uploading image 514afc33-9ab7-4777-a973-8c846c0ae8e9 {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1343.868580] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1343.868580] env[69994]: value = "vm-648062" [ 1343.868580] env[69994]: _type = "VirtualMachine" [ 1343.868580] env[69994]: }. 
{{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1343.869140] env[69994]: DEBUG oslo_concurrency.lockutils [None req-26b68a79-3a58-4230-a19a-5d8079d379aa tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "4f002725-3f15-4d10-a7ee-07132faf6266" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.980s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.869254] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-31cecaad-5485-4a3d-82aa-c7963b14d1db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.876547] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease: (returnval){ [ 1343.876547] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e7385-ea41-2a46-e0ff-a574d9f2dcf3" [ 1343.876547] env[69994]: _type = "HttpNfcLease" [ 1343.876547] env[69994]: } obtained for exporting VM: (result){ [ 1343.876547] env[69994]: value = "vm-648062" [ 1343.876547] env[69994]: _type = "VirtualMachine" [ 1343.876547] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1343.876547] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the lease: (returnval){ [ 1343.876547] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e7385-ea41-2a46-e0ff-a574d9f2dcf3" [ 1343.876547] env[69994]: _type = "HttpNfcLease" [ 1343.876547] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1343.882093] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1343.882093] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e7385-ea41-2a46-e0ff-a574d9f2dcf3" [ 1343.882093] env[69994]: _type = "HttpNfcLease" [ 1343.882093] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1344.276468] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1344.276995] env[69994]: DEBUG nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1344.279838] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.472s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.280064] env[69994]: DEBUG nova.objects.instance [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'resources' on Instance uuid 217bd31d-f705-4aa7-a8a7-d79e407b7c7b {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1344.384515] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1344.384515] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e7385-ea41-2a46-e0ff-a574d9f2dcf3" [ 1344.384515] env[69994]: _type = "HttpNfcLease" [ 1344.384515] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1344.384807] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1344.384807] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e7385-ea41-2a46-e0ff-a574d9f2dcf3" [ 1344.384807] env[69994]: _type = "HttpNfcLease" [ 1344.384807] env[69994]: }. {{(pid=69994) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1344.385567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3717c4d2-c959-4a63-9db3-90ce7d2c7df9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.392423] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ca6c3e-0201-ebe7-ceb2-e1730b9fbd72/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1344.392590] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ca6c3e-0201-ebe7-ceb2-e1730b9fbd72/disk-0.vmdk for reading. 
{{(pid=69994) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1344.448021] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.448331] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.448555] env[69994]: DEBUG nova.compute.manager [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1344.449902] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d968061-022f-4eb7-9e1c-4cab40ad4af8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.457681] env[69994]: DEBUG nova.compute.manager [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1344.458291] env[69994]: DEBUG nova.objects.instance [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'flavor' on Instance uuid 7e92935f-fc1f-4893-8f69-4b97e4729a7f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1344.483655] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-879d0929-23f8-4fb5-9398-810cd6465c66 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.491513] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.491743] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.782929] env[69994]: DEBUG nova.compute.utils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1344.786956] env[69994]: DEBUG nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1344.787164] env[69994]: DEBUG nova.network.neutron [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1344.843230] env[69994]: DEBUG nova.policy [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1344.927353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5712d80d-4a1b-426c-bf40-2cbed39c0630 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.937158] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1968279-7daf-4516-9c6d-a112f086fa44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.971689] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202fe855-b3e3-463d-b2e1-20e0cca5428a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.981223] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03140b43-1fa6-4c61-87f7-9dd85ac9a152 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.996191] env[69994]: DEBUG nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1344.999329] env[69994]: DEBUG nova.compute.provider_tree [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1345.152351] env[69994]: DEBUG nova.network.neutron [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Successfully created port: 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1345.291717] env[69994]: DEBUG nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1345.477453] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1345.477721] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c99e0695-ae5e-4ad0-882f-e02930bf5d92 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.485503] env[69994]: DEBUG oslo_vmware.api [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1345.485503] env[69994]: value = "task-3242888" [ 1345.485503] env[69994]: _type = "Task" [ 1345.485503] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.494648] env[69994]: DEBUG oslo_vmware.api [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242888, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.504074] env[69994]: DEBUG nova.scheduler.client.report [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1345.527867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.995300] env[69994]: DEBUG oslo_vmware.api [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242888, 'name': PowerOffVM_Task, 'duration_secs': 0.19926} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.995645] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1345.995821] env[69994]: DEBUG nova.compute.manager [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1345.996715] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a2142a-804b-4f6c-9406-443eb6cd1d7d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.009834] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.012183] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.484s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.013669] env[69994]: INFO nova.compute.claims [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef 
tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1346.032164] env[69994]: INFO nova.scheduler.client.report [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted allocations for instance 217bd31d-f705-4aa7-a8a7-d79e407b7c7b [ 1346.302532] env[69994]: DEBUG nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1346.331063] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1346.331385] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1346.331578] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1346.331846] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1346.332053] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1346.332259] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1346.332556] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1346.332773] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1346.333022] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1346.333232] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1346.333498] env[69994]: DEBUG nova.virt.hardware [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1346.334555] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e970af1-8c3d-4cd2-934f-4ced261f17fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.343324] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a62162-8d9b-4be6-87de-057100690258 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.508362] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f05ab44a-4f97-42f6-bf08-c156d5d25b39 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.535320] env[69994]: DEBUG nova.compute.manager [req-02fec7b8-246a-4361-90d7-45fbe4486699 req-f296704f-4b9a-4906-9ca8-706710bb2d11 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-vif-plugged-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1346.535723] env[69994]: DEBUG oslo_concurrency.lockutils [req-02fec7b8-246a-4361-90d7-45fbe4486699 req-f296704f-4b9a-4906-9ca8-706710bb2d11 service nova] Acquiring lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.536251] env[69994]: DEBUG oslo_concurrency.lockutils [req-02fec7b8-246a-4361-90d7-45fbe4486699 req-f296704f-4b9a-4906-9ca8-706710bb2d11 service nova] Lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1346.536650] env[69994]: DEBUG oslo_concurrency.lockutils [req-02fec7b8-246a-4361-90d7-45fbe4486699 req-f296704f-4b9a-4906-9ca8-706710bb2d11 service nova] Lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.537088] env[69994]: DEBUG nova.compute.manager [req-02fec7b8-246a-4361-90d7-45fbe4486699 req-f296704f-4b9a-4906-9ca8-706710bb2d11 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] No waiting events found dispatching network-vif-plugged-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1346.537423] env[69994]: WARNING nova.compute.manager [req-02fec7b8-246a-4361-90d7-45fbe4486699 req-f296704f-4b9a-4906-9ca8-706710bb2d11 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received unexpected event network-vif-plugged-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 for instance with vm_state building and task_state spawning. [ 1346.541897] env[69994]: DEBUG oslo_concurrency.lockutils [None req-722475d0-a0e4-4447-950d-5c66fa553470 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "217bd31d-f705-4aa7-a8a7-d79e407b7c7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.003s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.616401] env[69994]: DEBUG nova.network.neutron [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Successfully updated port: 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1346.791426] env[69994]: DEBUG nova.objects.instance [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'flavor' on Instance uuid 7e92935f-fc1f-4893-8f69-4b97e4729a7f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.119907] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.120261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1347.120261] env[69994]: DEBUG nova.network.neutron [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.164693] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd89719-432c-4188-93b5-1798217b7582 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.173238] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ae9197-94aa-4e58-92ab-da9042669f5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.204849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d394f93b-54ef-4da7-89ee-e9922d07ab5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.213456] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97928f9f-7b11-4522-bae3-1a2b6d5a51bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.227867] env[69994]: DEBUG nova.compute.provider_tree [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1347.296674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.296918] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1347.297052] env[69994]: DEBUG nova.network.neutron [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.297162] env[69994]: DEBUG nova.objects.instance [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'info_cache' on Instance uuid 7e92935f-fc1f-4893-8f69-4b97e4729a7f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1347.434193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 
tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "6b73608e-b62f-4292-870c-51f1c686e569" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.434603] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "6b73608e-b62f-4292-870c-51f1c686e569" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.434858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "6b73608e-b62f-4292-870c-51f1c686e569-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.435102] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "6b73608e-b62f-4292-870c-51f1c686e569-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.435431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "6b73608e-b62f-4292-870c-51f1c686e569-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.437629] env[69994]: INFO nova.compute.manager [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Terminating instance [ 1347.663915] env[69994]: DEBUG nova.network.neutron [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1347.731323] env[69994]: DEBUG nova.scheduler.client.report [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1347.801553] env[69994]: DEBUG nova.objects.base [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Object Instance<7e92935f-fc1f-4893-8f69-4b97e4729a7f> lazy-loaded attributes: flavor,info_cache {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1347.803575] env[69994]: DEBUG nova.network.neutron [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.942168] env[69994]: DEBUG nova.compute.manager [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1347.942423] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1347.943362] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17eef65-ebde-4c5e-a0ba-c75ff5d2564a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.951237] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1347.951476] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3719112a-681d-4e61-9111-67dce69100e0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.957604] env[69994]: DEBUG oslo_vmware.api [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1347.957604] env[69994]: value = "task-3242889" [ 1347.957604] env[69994]: _type = "Task" [ 1347.957604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.965773] env[69994]: DEBUG oslo_vmware.api [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.236817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.237362] env[69994]: DEBUG nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Start building networks asynchronously for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1348.308933] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1348.309349] env[69994]: DEBUG nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Instance network_info: |[{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1348.310351] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:3f:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1348.322718] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1348.323536] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1348.323816] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-debae313-e22f-4a54-9ccf-289b8d390949 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.355344] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1348.355344] env[69994]: value = "task-3242890" [ 1348.355344] env[69994]: _type = "Task" [ 1348.355344] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.365628] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242890, 'name': CreateVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.466791] env[69994]: DEBUG oslo_vmware.api [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242889, 'name': PowerOffVM_Task, 'duration_secs': 0.409593} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.467067] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1348.467242] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1348.467496] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dd0090a-fe67-4587-bac7-f62f0c2842af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.534122] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1348.534514] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1348.534685] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleting the datastore file [datastore2] 6b73608e-b62f-4292-870c-51f1c686e569 
{{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1348.534955] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76079c59-94f2-480f-bf02-79168c8f70fb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.541652] env[69994]: DEBUG oslo_vmware.api [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1348.541652] env[69994]: value = "task-3242892" [ 1348.541652] env[69994]: _type = "Task" [ 1348.541652] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.549934] env[69994]: DEBUG oslo_vmware.api [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.552772] env[69994]: DEBUG nova.network.neutron [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updating instance_info_cache with network_info: [{"id": "521061f8-5fe8-473b-ba95-6d17064efb80", "address": "fa:16:3e:ca:b0:f9", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521061f8-5f", "ovs_interfaceid": "521061f8-5fe8-473b-ba95-6d17064efb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.568218] env[69994]: DEBUG nova.compute.manager [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1348.568422] env[69994]: DEBUG nova.compute.manager [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing instance network info cache due to event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1348.568633] env[69994]: DEBUG oslo_concurrency.lockutils [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.568794] env[69994]: DEBUG oslo_concurrency.lockutils [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.568921] env[69994]: DEBUG nova.network.neutron [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1348.743582] env[69994]: DEBUG nova.compute.utils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1348.745852] env[69994]: DEBUG nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1348.745852] env[69994]: DEBUG nova.network.neutron [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1348.783200] env[69994]: DEBUG nova.policy [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '432370da6f1840db8f93b613ca52e31d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42ee300d6f33459da1deb82b1b14cf74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1348.867894] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242890, 'name': CreateVM_Task, 'duration_secs': 0.387803} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.868198] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1348.868792] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.869031] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.869324] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1348.869585] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e33d5ce-d88b-4614-a9f3-c9756519e37d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.874604] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1348.874604] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fe6899-aa43-bd3c-5db2-8e560bf1e6fb" [ 1348.874604] env[69994]: _type = "Task" [ 1348.874604] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.882394] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fe6899-aa43-bd3c-5db2-8e560bf1e6fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.047669] env[69994]: DEBUG nova.network.neutron [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Successfully created port: 3683f9a3-6187-49b8-85d9-0d4baaf1f783 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1349.054506] env[69994]: DEBUG oslo_vmware.api [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189726} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.054758] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.054943] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1349.055134] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1349.055322] env[69994]: INFO nova.compute.manager [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1349.059019] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1349.059019] env[69994]: DEBUG nova.compute.manager [-] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1349.059019] env[69994]: DEBUG nova.network.neutron [-] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1349.059019] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.249929] env[69994]: DEBUG nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1349.385994] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fe6899-aa43-bd3c-5db2-8e560bf1e6fb, 'name': SearchDatastore_Task, 'duration_secs': 0.011227} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.387326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.387326] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1349.387326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.387326] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1349.387326] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1349.387326] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02811423-b019-4a0d-9d8d-427db3e8a7e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.390445] env[69994]: DEBUG nova.network.neutron [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updated VIF entry in instance network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1349.390765] env[69994]: DEBUG nova.network.neutron [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.396311] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1349.396498] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1349.397273] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3cfcec8-576c-4c50-b2f0-48c04c63b511 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.403432] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1349.403432] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523233e3-7301-74f6-90b7-05970a31dc64" [ 1349.403432] env[69994]: _type = "Task" [ 1349.403432] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.412920] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523233e3-7301-74f6-90b7-05970a31dc64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.414976] env[69994]: DEBUG nova.compute.manager [req-87f4a110-4a22-4f8a-960c-fd4cddaf30f5 req-e4847782-f8a7-468e-a202-19d8ea5df669 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Received event network-vif-deleted-7ce56c69-d544-4ac1-b891-5678e0fd77ef {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1349.415197] env[69994]: INFO nova.compute.manager [req-87f4a110-4a22-4f8a-960c-fd4cddaf30f5 req-e4847782-f8a7-468e-a202-19d8ea5df669 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Neutron deleted interface 7ce56c69-d544-4ac1-b891-5678e0fd77ef; detaching it from the instance and deleting it from the info cache [ 1349.415393] env[69994]: DEBUG nova.network.neutron [req-87f4a110-4a22-4f8a-960c-fd4cddaf30f5 req-e4847782-f8a7-468e-a202-19d8ea5df669 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.893920] env[69994]: DEBUG oslo_concurrency.lockutils [req-e3509aba-36bb-4c03-87d1-cc3f7f8b096c req-fd53434f-2b53-49e5-ac5f-dc64a310c82c service nova] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.896393] env[69994]: DEBUG nova.network.neutron [-] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.916336] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523233e3-7301-74f6-90b7-05970a31dc64, 'name': SearchDatastore_Task, 'duration_secs': 0.013752} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.917391] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-859e168f-6571-4a9c-ad13-1c4ea864e1c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.919603] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20f224da-e6a9-4338-b31a-6a2977f3d05e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.924611] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1349.924611] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52678deb-6c91-c30e-bb0c-b590235bdc96" [ 1349.924611] env[69994]: _type = "Task" [ 1349.924611] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.930807] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce02143-0223-4439-9496-26ce6726fe6f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.947519] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52678deb-6c91-c30e-bb0c-b590235bdc96, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.947749] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.947995] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 902200b2-f2ca-4979-961a-ec046d22d05c/902200b2-f2ca-4979-961a-ec046d22d05c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1349.948251] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d3488b9-2b57-468c-b783-22cb60490b3a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.960331] env[69994]: DEBUG nova.compute.manager [req-87f4a110-4a22-4f8a-960c-fd4cddaf30f5 req-e4847782-f8a7-468e-a202-19d8ea5df669 service nova] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Detach interface failed, port_id=7ce56c69-d544-4ac1-b891-5678e0fd77ef, reason: Instance 6b73608e-b62f-4292-870c-51f1c686e569 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1349.961789] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1349.961789] env[69994]: value = "task-3242893" [ 1349.961789] env[69994]: _type = "Task" [ 1349.961789] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.968980] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242893, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.064062] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.064585] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49a00e1b-7f9d-4186-93ab-53d83a1d5eec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.073828] env[69994]: DEBUG oslo_vmware.api [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1350.073828] env[69994]: value = "task-3242894" [ 1350.073828] env[69994]: _type = "Task" [ 1350.073828] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.082761] env[69994]: DEBUG oslo_vmware.api [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242894, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.258872] env[69994]: DEBUG nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1350.289511] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1350.289784] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1350.289934] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1350.290131] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1350.290288] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1350.290463] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1350.290681] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1350.290989] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1350.291042] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 
tempest-ServersTestJSON-478926025-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1350.291272] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1350.291460] env[69994]: DEBUG nova.virt.hardware [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1350.292459] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daecd22-38de-4abb-834c-91dd22e3952e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.306302] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc243b1-97bf-47fb-932b-5323fc1b22ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.399031] env[69994]: INFO nova.compute.manager [-] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Took 1.34 seconds to deallocate network for instance. [ 1350.475014] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242893, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.583992] env[69994]: DEBUG oslo_vmware.api [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242894, 'name': PowerOnVM_Task, 'duration_secs': 0.447667} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.585543] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.585773] env[69994]: DEBUG nova.compute.manager [None req-7de49f98-5983-4ab5-9842-34451b7eacd8 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1350.586652] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794fba5d-d431-4f99-8955-6a476a266810 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.597588] env[69994]: DEBUG nova.compute.manager [req-7ad5f817-f4e1-4bc0-9b94-579a11416c06 req-a4d59e7d-ad0b-43a7-8e59-b5b1ad0b5305 service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Received event network-vif-plugged-3683f9a3-6187-49b8-85d9-0d4baaf1f783 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1350.597798] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ad5f817-f4e1-4bc0-9b94-579a11416c06 req-a4d59e7d-ad0b-43a7-8e59-b5b1ad0b5305 service nova] Acquiring lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.598009] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ad5f817-f4e1-4bc0-9b94-579a11416c06 req-a4d59e7d-ad0b-43a7-8e59-b5b1ad0b5305 service nova] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.598187] env[69994]: DEBUG oslo_concurrency.lockutils [req-7ad5f817-f4e1-4bc0-9b94-579a11416c06 req-a4d59e7d-ad0b-43a7-8e59-b5b1ad0b5305 service nova] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.598354] env[69994]: DEBUG nova.compute.manager [req-7ad5f817-f4e1-4bc0-9b94-579a11416c06 req-a4d59e7d-ad0b-43a7-8e59-b5b1ad0b5305 service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] No waiting events found dispatching network-vif-plugged-3683f9a3-6187-49b8-85d9-0d4baaf1f783 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1350.598519] env[69994]: WARNING nova.compute.manager [req-7ad5f817-f4e1-4bc0-9b94-579a11416c06 req-a4d59e7d-ad0b-43a7-8e59-b5b1ad0b5305 service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Received unexpected event network-vif-plugged-3683f9a3-6187-49b8-85d9-0d4baaf1f783 for instance with vm_state building and task_state spawning. 
[ 1350.680885] env[69994]: DEBUG nova.network.neutron [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Successfully updated port: 3683f9a3-6187-49b8-85d9-0d4baaf1f783 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1350.906157] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.906440] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.906662] env[69994]: DEBUG nova.objects.instance [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'resources' on Instance uuid 6b73608e-b62f-4292-870c-51f1c686e569 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1350.971809] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540236} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.972091] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 902200b2-f2ca-4979-961a-ec046d22d05c/902200b2-f2ca-4979-961a-ec046d22d05c.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1350.972310] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1350.972567] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa4b7e72-7d44-4e3c-ba76-112a0608e3de {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.979493] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1350.979493] env[69994]: value = "task-3242895" [ 1350.979493] env[69994]: _type = "Task" [ 1350.979493] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.987786] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242895, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.184013] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "refresh_cache-3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.184013] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "refresh_cache-3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1351.184013] env[69994]: DEBUG nova.network.neutron [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1351.491763] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088058} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.494348] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1351.495377] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b94f325-68cf-46b6-a3f6-5ccf6d394422 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.517876] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 902200b2-f2ca-4979-961a-ec046d22d05c/902200b2-f2ca-4979-961a-ec046d22d05c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1351.520634] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8e39d86-7ad0-4076-827d-25b8aa785382 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.541657] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1351.541657] env[69994]: value = "task-3242896" [ 1351.541657] env[69994]: _type = "Task" [ 1351.541657] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.553577] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242896, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.558564] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eaa385c-4bcf-47c5-9721-aa5c7ed853db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.566699] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77aa79b0-2fb8-4ca8-8f87-84ec6a4f55a2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.599971] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415a7623-3850-4bdc-922d-81d1d9518dc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.607836] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a62e7e-b10f-4cde-ab2a-581dcda34f67 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.622070] env[69994]: DEBUG nova.compute.provider_tree [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.632116] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b9ce62-4811-40d2-9249-d6e634e3de03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.638546] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e131f1e-25b9-45d4-b01d-ef4f2ee8ce15 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Suspending the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1351.638807] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-4c99a9d8-7ff5-4484-9e2a-e566655063ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.644759] env[69994]: DEBUG oslo_vmware.api [None req-9e131f1e-25b9-45d4-b01d-ef4f2ee8ce15 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1351.644759] env[69994]: value = "task-3242897" [ 1351.644759] env[69994]: _type = "Task" [ 1351.644759] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.652712] env[69994]: DEBUG oslo_vmware.api [None req-9e131f1e-25b9-45d4-b01d-ef4f2ee8ce15 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242897, 'name': SuspendVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.729626] env[69994]: DEBUG nova.network.neutron [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1351.884431] env[69994]: DEBUG nova.network.neutron [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Updating instance_info_cache with network_info: [{"id": "3683f9a3-6187-49b8-85d9-0d4baaf1f783", "address": "fa:16:3e:95:33:02", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3683f9a3-61", "ovs_interfaceid": "3683f9a3-6187-49b8-85d9-0d4baaf1f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.052510] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242896, 'name': ReconfigVM_Task, 'duration_secs': 0.34216} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.052859] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 902200b2-f2ca-4979-961a-ec046d22d05c/902200b2-f2ca-4979-961a-ec046d22d05c.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1352.053598] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c6ee991-d560-4a1d-9431-3bfa11e6666b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.059874] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1352.059874] env[69994]: value = "task-3242898" [ 1352.059874] env[69994]: _type = "Task" [ 1352.059874] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.068189] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242898, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.125758] env[69994]: DEBUG nova.scheduler.client.report [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1352.155579] env[69994]: DEBUG oslo_vmware.api [None req-9e131f1e-25b9-45d4-b01d-ef4f2ee8ce15 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242897, 'name': SuspendVM_Task} progress is 50%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.387931] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "refresh_cache-3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1352.388416] env[69994]: DEBUG nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Instance network_info: |[{"id": "3683f9a3-6187-49b8-85d9-0d4baaf1f783", "address": "fa:16:3e:95:33:02", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3683f9a3-61", "ovs_interfaceid": "3683f9a3-6187-49b8-85d9-0d4baaf1f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1352.388898] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:33:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089ef678-58b4-4bf0-a39d-b94b2d364291', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3683f9a3-6187-49b8-85d9-0d4baaf1f783', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1352.396881] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1352.397145] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1352.397406] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dc8db7d-7d0c-4bd4-a9c2-8461570bfb8e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.418102] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1352.418102] env[69994]: value = "task-3242899" [ 1352.418102] env[69994]: _type = "Task" [ 1352.418102] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.427408] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242899, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.569988] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242898, 'name': Rename_Task, 'duration_secs': 0.150408} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.570311] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1352.570578] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f564a1e4-9d34-4dab-baea-380001c8b702 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.577209] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1352.577209] env[69994]: value = "task-3242900" [ 1352.577209] env[69994]: _type = "Task" [ 1352.577209] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.585534] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242900, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.628597] env[69994]: DEBUG nova.compute.manager [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Received event network-changed-3683f9a3-6187-49b8-85d9-0d4baaf1f783 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1352.628885] env[69994]: DEBUG nova.compute.manager [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Refreshing instance network info cache due to event network-changed-3683f9a3-6187-49b8-85d9-0d4baaf1f783. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1352.629117] env[69994]: DEBUG oslo_concurrency.lockutils [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] Acquiring lock "refresh_cache-3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.629286] env[69994]: DEBUG oslo_concurrency.lockutils [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] Acquired lock "refresh_cache-3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.629455] env[69994]: DEBUG nova.network.neutron [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Refreshing network info cache for port 3683f9a3-6187-49b8-85d9-0d4baaf1f783 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1352.631264] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.651901] env[69994]: INFO nova.scheduler.client.report [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted allocations for instance 6b73608e-b62f-4292-870c-51f1c686e569 [ 1352.661773] env[69994]: DEBUG oslo_vmware.api [None req-9e131f1e-25b9-45d4-b01d-ef4f2ee8ce15 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242897, 'name': SuspendVM_Task, 'duration_secs': 0.665524} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.662094] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e131f1e-25b9-45d4-b01d-ef4f2ee8ce15 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Suspended the VM {{(pid=69994) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1352.662508] env[69994]: DEBUG nova.compute.manager [None req-9e131f1e-25b9-45d4-b01d-ef4f2ee8ce15 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1352.663181] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e00f3be-1c62-467d-9c3e-87df18b4ec20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.788935] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ca6c3e-0201-ebe7-ceb2-e1730b9fbd72/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1352.790078] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d44c0b0-f901-44d4-82d5-7739f6678b9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.795891] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ca6c3e-0201-ebe7-ceb2-e1730b9fbd72/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1352.796068] env[69994]: ERROR oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ca6c3e-0201-ebe7-ceb2-e1730b9fbd72/disk-0.vmdk due to incomplete transfer. [ 1352.796296] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4ab35b59-0b98-4725-b578-c5987a1392ef {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.802794] env[69994]: DEBUG oslo_vmware.rw_handles [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ca6c3e-0201-ebe7-ceb2-e1730b9fbd72/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1352.802981] env[69994]: DEBUG nova.virt.vmwareapi.images [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Uploaded image 514afc33-9ab7-4777-a973-8c846c0ae8e9 to the Glance image server {{(pid=69994) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1352.805461] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Destroying the VM {{(pid=69994) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1352.805680] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0aa61b06-8efb-418d-9573-686fae330a18 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.810702] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1352.810702] env[69994]: value = "task-3242901" [ 1352.810702] env[69994]: _type = "Task" [ 1352.810702] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.817756] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242901, 'name': Destroy_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.929033] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242899, 'name': CreateVM_Task, 'duration_secs': 0.368583} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.929293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1352.930061] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.930288] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.930651] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1352.930957] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2526e19e-f5d5-4fe8-b251-5fd4764eaf51 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.935883] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1352.935883] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526fd05f-45b5-3670-0cd2-c8cd0d2ae9da" [ 1352.935883] env[69994]: _type = "Task" [ 1352.935883] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.944956] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526fd05f-45b5-3670-0cd2-c8cd0d2ae9da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.086356] env[69994]: DEBUG oslo_vmware.api [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242900, 'name': PowerOnVM_Task, 'duration_secs': 0.457863} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.086608] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1353.086805] env[69994]: INFO nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Took 6.78 seconds to spawn the instance on the hypervisor. [ 1353.086985] env[69994]: DEBUG nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1353.087809] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b20c52-878b-4ce8-9815-37ad3162f8f4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.160105] env[69994]: DEBUG oslo_concurrency.lockutils [None req-435096da-d50f-4547-8914-977234c88161 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "6b73608e-b62f-4292-870c-51f1c686e569" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.725s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1353.322598] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242901, 'name': Destroy_Task, 'duration_secs': 0.307011} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.322848] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Destroyed the VM [ 1353.323109] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Deleting Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1353.323356] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a3f0aa23-1dc8-436d-a13c-6bf57cb8076f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.330514] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1353.330514] env[69994]: value = "task-3242902" [ 1353.330514] env[69994]: _type = "Task" [ 1353.330514] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.337721] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242902, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.345967] env[69994]: DEBUG nova.network.neutron [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Updated VIF entry in instance network info cache for port 3683f9a3-6187-49b8-85d9-0d4baaf1f783. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1353.346321] env[69994]: DEBUG nova.network.neutron [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Updating instance_info_cache with network_info: [{"id": "3683f9a3-6187-49b8-85d9-0d4baaf1f783", "address": "fa:16:3e:95:33:02", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3683f9a3-61", "ovs_interfaceid": "3683f9a3-6187-49b8-85d9-0d4baaf1f783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.446209] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526fd05f-45b5-3670-0cd2-c8cd0d2ae9da, 'name': SearchDatastore_Task, 'duration_secs': 0.010297} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.446506] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1353.446740] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1353.446974] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.447135] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1353.447314] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1353.447574] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2af2e27d-edc1-4632-8eab-95fbfaa44257 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.456677] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1353.456864] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1353.457633] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c7e32dd-e9dd-43c3-8e33-4eb38be5b850 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.462776] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1353.462776] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]526b9ade-ce2f-f841-b88a-79f45a5f2af7" [ 1353.462776] env[69994]: _type = "Task" [ 1353.462776] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.470132] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526b9ade-ce2f-f841-b88a-79f45a5f2af7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.605898] env[69994]: INFO nova.compute.manager [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Took 11.52 seconds to build instance. [ 1353.841288] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242902, 'name': RemoveSnapshot_Task, 'duration_secs': 0.381707} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.841586] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Deleted Snapshot of the VM instance {{(pid=69994) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1353.841832] env[69994]: DEBUG nova.compute.manager [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1353.842678] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537dab70-d7e4-41ff-a9ec-063a94b0bf0e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.848761] env[69994]: DEBUG oslo_concurrency.lockutils [req-06e849b5-aec1-4613-bfc3-cdc688b87d04 req-4005ea7d-91d1-49c7-b0cd-46d62cc5436b service nova] Releasing lock "refresh_cache-3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1353.973374] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]526b9ade-ce2f-f841-b88a-79f45a5f2af7, 'name': SearchDatastore_Task, 'duration_secs': 0.012915} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.974328] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-615d1682-009e-4080-a6ae-3c6a0737c4b8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.978962] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1353.978962] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5234deea-6707-df42-37b2-eacf15b9d2e8" [ 1353.978962] env[69994]: _type = "Task" [ 1353.978962] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.986207] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5234deea-6707-df42-37b2-eacf15b9d2e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.036537] env[69994]: INFO nova.compute.manager [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Resuming [ 1354.037162] env[69994]: DEBUG nova.objects.instance [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'flavor' on Instance uuid 7e92935f-fc1f-4893-8f69-4b97e4729a7f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1354.108488] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4fa08baf-f550-4878-b9e5-1b4a3dd5876d tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "902200b2-f2ca-4979-961a-ec046d22d05c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.034s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.355961] env[69994]: INFO nova.compute.manager [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Shelve offloading [ 1354.489723] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5234deea-6707-df42-37b2-eacf15b9d2e8, 'name': SearchDatastore_Task, 'duration_secs': 0.029229} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.489995] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1354.490271] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3/3c433e3b-4c16-4cfc-a7d5-40e40b0906b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1354.490564] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d918aad8-49b3-4edd-a4b9-ea0fdc7d13c5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.497857] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1354.497857] env[69994]: value = "task-3242904" [ 1354.497857] env[69994]: _type = "Task" [ 1354.497857] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.505787] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.657519] env[69994]: DEBUG nova.compute.manager [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1354.657812] env[69994]: DEBUG nova.compute.manager [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing instance network info cache due to event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1354.658071] env[69994]: DEBUG oslo_concurrency.lockutils [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.658110] env[69994]: DEBUG oslo_concurrency.lockutils [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.658351] env[69994]: DEBUG nova.network.neutron [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1354.859982] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1354.860311] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d33e93e8-79cb-4d4e-b444-e0a126e2213b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.868881] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1354.868881] env[69994]: value = "task-3242905" [ 1354.868881] env[69994]: _type = "Task" [ 1354.868881] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.876841] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242905, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.009484] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242904, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.053985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.054336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.370167] env[69994]: DEBUG nova.network.neutron [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updated VIF entry in instance network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.370597] env[69994]: DEBUG nova.network.neutron [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.381587] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] VM already powered off {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1355.381796] env[69994]: DEBUG nova.compute.manager [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1355.382583] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67816fe4-1b4d-4848-92b8-650e8d7748b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.389010] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.389192] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.389360] env[69994]: DEBUG nova.network.neutron [None 
req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1355.509131] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242904, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.866412} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.509404] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3/3c433e3b-4c16-4cfc-a7d5-40e40b0906b3.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1355.509642] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1355.509862] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c1e7b7d-8b26-4469-9d53-567409b99dd5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.516784] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1355.516784] env[69994]: value = "task-3242906" [ 1355.516784] env[69994]: _type = "Task" [ 1355.516784] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.525897] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242906, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.548988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.549212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquired lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1355.549408] env[69994]: DEBUG nova.network.neutron [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1355.557858] env[69994]: DEBUG nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1355.876852] env[69994]: DEBUG oslo_concurrency.lockutils [req-622879da-9613-468e-b019-56fcb0ba9617 req-6241faba-7f10-4e03-908b-86d539652825 service nova] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.026787] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242906, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061264} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.027029] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1356.027796] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6dd684-a45f-4753-a722-e1017d25531b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.049572] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3/3c433e3b-4c16-4cfc-a7d5-40e40b0906b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1356.052241] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8daf73fd-b61c-402c-84f0-800e0fe9b272 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.078837] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1356.078837] env[69994]: value = "task-3242907" [ 1356.078837] env[69994]: _type = "Task" [ 1356.078837] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.087686] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242907, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.089033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.089033] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.090555] env[69994]: INFO nova.compute.claims [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1356.137219] env[69994]: DEBUG nova.network.neutron [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06234607-a0", "ovs_interfaceid": "06234607-a0e8-40a9-8a07-6f4502407064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.339197] env[69994]: DEBUG nova.network.neutron [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updating instance_info_cache with network_info: [{"id": "521061f8-5fe8-473b-ba95-6d17064efb80", "address": "fa:16:3e:ca:b0:f9", "network": {"id": "ac8cc640-01c3-4a64-8b69-1458ee2131a1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1685085861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38d5a89ed7c248c3be506ef12caf5f1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ddfb706a-add1-4e16-9ac4-d20b16a1df6d", "external-id": "nsx-vlan-transportzone-820", "segmentation_id": 820, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521061f8-5f", "ovs_interfaceid": "521061f8-5fe8-473b-ba95-6d17064efb80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.588867] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242907, 'name': ReconfigVM_Task, 'duration_secs': 0.304836} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.589157] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3/3c433e3b-4c16-4cfc-a7d5-40e40b0906b3.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1356.589797] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42e2fe58-8fa8-4e81-8c9d-02afeb179629 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.596811] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1356.596811] env[69994]: value = "task-3242908" [ 1356.596811] env[69994]: _type = "Task" [ 1356.596811] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.604809] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242908, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.640668] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.842096] env[69994]: DEBUG oslo_concurrency.lockutils [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Releasing lock "refresh_cache-7e92935f-fc1f-4893-8f69-4b97e4729a7f" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.843077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e679cf7a-d174-436b-92d9-9be745bdc402 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.849517] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Resuming the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1356.849741] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ba8cef7-649b-494c-aa3a-373a38c7fa17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.855515] env[69994]: DEBUG oslo_vmware.api [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1356.855515] env[69994]: value = "task-3242909" [ 1356.855515] env[69994]: _type = "Task" [ 1356.855515] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.862713] env[69994]: DEBUG oslo_vmware.api [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242909, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.948410] env[69994]: DEBUG nova.compute.manager [req-3aa18960-3a93-4080-bb2e-750c1467553f req-548abe4b-b3a9-432a-88a9-4028bfb8ab56 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-vif-unplugged-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1356.948781] env[69994]: DEBUG oslo_concurrency.lockutils [req-3aa18960-3a93-4080-bb2e-750c1467553f req-548abe4b-b3a9-432a-88a9-4028bfb8ab56 service nova] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.948876] env[69994]: DEBUG oslo_concurrency.lockutils [req-3aa18960-3a93-4080-bb2e-750c1467553f req-548abe4b-b3a9-432a-88a9-4028bfb8ab56 service nova] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.949075] env[69994]: DEBUG oslo_concurrency.lockutils [req-3aa18960-3a93-4080-bb2e-750c1467553f req-548abe4b-b3a9-432a-88a9-4028bfb8ab56 service nova] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.949407] env[69994]: DEBUG nova.compute.manager [req-3aa18960-3a93-4080-bb2e-750c1467553f req-548abe4b-b3a9-432a-88a9-4028bfb8ab56 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] No waiting events found dispatching network-vif-unplugged-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1356.949407] env[69994]: WARNING nova.compute.manager [req-3aa18960-3a93-4080-bb2e-750c1467553f req-548abe4b-b3a9-432a-88a9-4028bfb8ab56 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received unexpected event network-vif-unplugged-06234607-a0e8-40a9-8a07-6f4502407064 for instance with vm_state shelved and task_state shelving_offloading. 
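Editor's note: the repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" entries above come from oslo.vmware's task polling around vCenter tasks such as CopyVirtualDisk_Task and PowerOffVM_Task. Below is a minimal, illustrative sketch of that poll-until-done pattern; the helper name wait_for_task_sketch, the get_task_info callable and the fixed poll interval are assumptions for readability, not the oslo.vmware implementation (which drives the loop with a looping call against the real vCenter task object).

# Illustrative sketch only: reproduces the wait_for_task/_poll_task logging
# pattern seen above. All names and the poll interval are assumptions.
import time
import logging

LOG = logging.getLogger(__name__)

def wait_for_task_sketch(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a task until it succeeds or errors, logging progress as it goes."""
    LOG.debug("Waiting for the task: %s to complete.", task_id)
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 4}
        if info["state"] == "success":
            LOG.debug("Task %s completed successfully.", task_id)
            return info
        if info["state"] == "error":
            raise RuntimeError("Task %s failed: %s" % (task_id, info.get("error")))
        LOG.debug("Task %s progress is %s%%.", task_id, info.get("progress", 0))
        time.sleep(interval)
    raise TimeoutError("Task %s did not complete within %ss" % (task_id, timeout))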
[ 1357.050492] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1357.051522] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4301918f-a255-4246-8be3-ce277643ede7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.059525] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1357.059791] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18711b53-afa8-44ad-81a0-fed8c1ed9ae5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.109781] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242908, 'name': Rename_Task, 'duration_secs': 0.140504} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.110057] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1357.110299] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c1b293f-0f3a-4504-b060-24d6c558c668 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.115408] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1357.115408] env[69994]: value = "task-3242911" [ 1357.115408] env[69994]: _type = "Task" [ 1357.115408] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.123685] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1357.123903] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1357.124100] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleting the datastore file [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1357.127210] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55d25510-a10b-4f8d-9796-8e663346b1da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.128879] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242911, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.133711] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1357.133711] env[69994]: value = "task-3242912" [ 1357.133711] env[69994]: _type = "Task" [ 1357.133711] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.141288] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242912, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.214784] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fd2cc3-6595-42f5-9970-0c5367f0d9b2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.222537] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fd94bf-1994-4735-aee0-92c0562dc735 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.226103] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1357.226349] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1357.226554] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1357.226754] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1357.226922] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.229053] env[69994]: INFO nova.compute.manager [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Terminating instance [ 1357.259989] env[69994]: DEBUG nova.compute.manager [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1357.260259] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1357.261126] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9eb177-c634-417d-b541-fc8265ac9f96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.265113] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603d6f00-0da9-4eda-ba8b-738426e85ebe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.276975] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee7a6f6-4f16-4133-bb17-d55306bb41f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.281542] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1357.282615] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ba1145a-ccfb-486a-86d8-6b7389ba3ca4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.296096] env[69994]: DEBUG nova.compute.provider_tree [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1357.299573] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1357.299573] env[69994]: value = "task-3242913" [ 1357.299573] env[69994]: _type = "Task" [ 1357.299573] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.310882] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242913, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.366137] env[69994]: DEBUG oslo_vmware.api [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242909, 'name': PowerOnVM_Task} progress is 93%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.626667] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242911, 'name': PowerOnVM_Task} progress is 81%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.645474] env[69994]: DEBUG oslo_vmware.api [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.645766] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1357.645993] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1357.646185] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1357.666318] env[69994]: INFO nova.scheduler.client.report [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted allocations for instance 71ee4730-f0e5-4c71-8053-be9e73b702a4 [ 1357.805317] env[69994]: DEBUG nova.scheduler.client.report [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1357.814141] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242913, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.867174] env[69994]: DEBUG oslo_vmware.api [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242909, 'name': PowerOnVM_Task, 'duration_secs': 0.640669} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.867458] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Resumed the VM {{(pid=69994) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1357.867669] env[69994]: DEBUG nova.compute.manager [None req-93b85397-4f9d-4d20-bd24-bb27d0ed638a tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1357.868493] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b76c36-b314-4da1-b3cb-6fecd526cd5b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.126074] env[69994]: DEBUG oslo_vmware.api [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242911, 'name': PowerOnVM_Task, 'duration_secs': 0.799256} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.126397] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1358.126569] env[69994]: INFO nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Took 7.87 seconds to spawn the instance on the hypervisor. 
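Editor's note: the "Inventory has not changed for provider ..." report above carries the resource-provider inventory as nested dicts (total, reserved, min_unit, max_unit, step_size, allocation_ratio per resource class). As a rough illustration, usable capacity per class follows from total, reserved and allocation_ratio as sketched below; the helper name and the rounding are assumptions for readability, not Placement's actual code, but the values are taken from the log line above.

# Rough sketch: derive usable capacity from the inventory dict logged by
# nova.scheduler.client.report above. (total - reserved) * allocation_ratio
# mirrors how Placement sizes capacity; helper name and int() rounding are
# illustrative assumptions.
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def usable_capacity(inventory):
    """Return {resource_class: capacity} for the logged inventory."""
    return {
        rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
        for rc, v in inventory.items()
    }

if __name__ == "__main__":
    # Expected for the values above: {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
    print(usable_capacity(INVENTORY))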
[ 1358.126751] env[69994]: DEBUG nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1358.127551] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8fc664-48c0-4b28-97bf-eb7de5ab6180 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.171156] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.311011] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.222s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.311521] env[69994]: DEBUG nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1358.314099] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242913, 'name': PowerOffVM_Task, 'duration_secs': 0.620458} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.315013] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.144s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.315249] env[69994]: DEBUG nova.objects.instance [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'resources' on Instance uuid 71ee4730-f0e5-4c71-8053-be9e73b702a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.316107] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1358.316280] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1358.316526] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f82edae-eac3-4a60-bb84-819ec3ed6911 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.427308] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1358.427568] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1358.427761] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleting the datastore file [datastore1] ef37ce64-2c26-4080-899a-6d9dbb5850c9 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1358.428035] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e2ee440-05e4-4a53-ac1b-019fb7662e60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.433759] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for the task: (returnval){ [ 1358.433759] env[69994]: value = "task-3242915" [ 
1358.433759] env[69994]: _type = "Task" [ 1358.433759] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.441733] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.647492] env[69994]: INFO nova.compute.manager [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Took 13.15 seconds to build instance. [ 1358.816920] env[69994]: DEBUG nova.compute.utils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1358.818424] env[69994]: DEBUG nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1358.818556] env[69994]: DEBUG nova.network.neutron [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1358.820824] env[69994]: DEBUG nova.objects.instance [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'numa_topology' on Instance uuid 71ee4730-f0e5-4c71-8053-be9e73b702a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1358.869953] env[69994]: DEBUG nova.policy [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1358.943695] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.021369] env[69994]: DEBUG nova.compute.manager [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-changed-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1359.022147] env[69994]: DEBUG nova.compute.manager [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing instance network info cache due to event network-changed-06234607-a0e8-40a9-8a07-6f4502407064. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1359.022147] env[69994]: DEBUG oslo_concurrency.lockutils [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] Acquiring lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.022320] env[69994]: DEBUG oslo_concurrency.lockutils [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] Acquired lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.023525] env[69994]: DEBUG nova.network.neutron [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1359.149887] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b4f9f8cf-0947-4f9c-9e74-0385758e94ef tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.658s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.191650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.191920] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.192161] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.192374] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.192550] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.194731] env[69994]: INFO nova.compute.manager [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Terminating instance [ 1359.322959] env[69994]: DEBUG nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1359.324695] env[69994]: DEBUG nova.objects.base [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Object Instance<71ee4730-f0e5-4c71-8053-be9e73b702a4> lazy-loaded attributes: resources,numa_topology {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1359.332043] env[69994]: DEBUG nova.network.neutron [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Successfully created port: 18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1359.422807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.423231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.423332] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 
tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.423987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.423987] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.425620] env[69994]: INFO nova.compute.manager [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Terminating instance [ 1359.428203] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e030f11-82a1-4870-8603-edc16743a8bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.440153] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176ca996-5b5b-404c-a4a5-657ab38ef367 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.448015] env[69994]: DEBUG oslo_vmware.api [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Task: {'id': task-3242915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.615552} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.471458] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1359.471661] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1359.471843] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1359.472011] env[69994]: INFO nova.compute.manager [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Took 2.21 seconds to destroy the instance on the hypervisor. [ 1359.472258] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1359.472910] env[69994]: DEBUG nova.compute.manager [-] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1359.473021] env[69994]: DEBUG nova.network.neutron [-] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1359.479104] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce269f8-621d-4943-8a04-e607289b701b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.484285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cbbf56-5fab-4e7e-bf51-5480f8f7bdd0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.500083] env[69994]: DEBUG nova.compute.provider_tree [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.698228] env[69994]: DEBUG nova.compute.manager [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1359.698474] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.700035] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6c7761-d2b6-4f0d-baeb-18854b02548e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.707685] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.707917] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05dd0f8b-a844-46ea-ac63-c7b437a875a5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.715709] env[69994]: DEBUG oslo_vmware.api [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1359.715709] env[69994]: value = "task-3242916" [ 1359.715709] env[69994]: _type = "Task" [ 1359.715709] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.725329] env[69994]: DEBUG oslo_vmware.api [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.933193] env[69994]: DEBUG nova.compute.manager [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1359.933442] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.934421] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c83a6a-8353-4761-a52f-507a9f840807 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.946570] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.949157] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c758b4a-df50-40fc-8742-e84af66bd42f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.956637] env[69994]: DEBUG oslo_vmware.api [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1359.956637] env[69994]: value = "task-3242917" [ 1359.956637] env[69994]: _type = "Task" [ 1359.956637] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.970329] env[69994]: DEBUG oslo_vmware.api [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242917, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.005646] env[69994]: DEBUG nova.scheduler.client.report [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1360.124462] env[69994]: DEBUG nova.network.neutron [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updated VIF entry in instance network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1360.124841] env[69994]: DEBUG nova.network.neutron [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap06234607-a0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.127932] env[69994]: DEBUG nova.compute.manager [req-7efe6e27-5f62-4250-a65c-939e676af2a0 req-d9c25937-56ad-4e22-b0c0-8ea03b27d616 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Received event network-vif-deleted-75af8d87-ecba-45ba-867a-8c8e9c0389c9 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1360.128144] env[69994]: INFO nova.compute.manager [req-7efe6e27-5f62-4250-a65c-939e676af2a0 req-d9c25937-56ad-4e22-b0c0-8ea03b27d616 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Neutron deleted interface 75af8d87-ecba-45ba-867a-8c8e9c0389c9; detaching it from the instance and deleting it from the info cache [ 1360.128332] env[69994]: DEBUG nova.network.neutron [req-7efe6e27-5f62-4250-a65c-939e676af2a0 req-d9c25937-56ad-4e22-b0c0-8ea03b27d616 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updating instance_info_cache with network_info: [] 
{{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.131904] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.225384] env[69994]: DEBUG oslo_vmware.api [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242916, 'name': PowerOffVM_Task, 'duration_secs': 0.232126} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.225698] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1360.225848] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1360.226132] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8c3af0a-c3e8-48cd-9934-79c614afa762 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.294414] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1360.294734] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1360.294935] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleting the datastore file [datastore1] 7e92935f-fc1f-4893-8f69-4b97e4729a7f {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.295538] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-166fab40-4d4e-4453-91de-694e3f4a0674 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.302790] env[69994]: DEBUG oslo_vmware.api [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for the task: (returnval){ [ 1360.302790] env[69994]: 
value = "task-3242919" [ 1360.302790] env[69994]: _type = "Task" [ 1360.302790] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.310849] env[69994]: DEBUG oslo_vmware.api [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.333321] env[69994]: DEBUG nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1360.359674] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1360.359976] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1360.360195] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1360.360423] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1360.360619] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1360.360823] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1360.361090] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1360.361316] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1360.361541] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1360.361757] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1360.361985] env[69994]: DEBUG nova.virt.hardware [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1360.362944] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceafef8-5a9b-49aa-b97b-62721eef0cb2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.370686] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec12d2f3-6cd1-4419-bf0a-a6040e32a37f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.465772] env[69994]: DEBUG oslo_vmware.api [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242917, 'name': PowerOffVM_Task, 'duration_secs': 0.170924} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.466040] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1360.466214] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1360.466489] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c58fa6b3-c0b5-4e4f-adbc-af0046e234d8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.510170] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.534860] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1360.535213] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1360.535531] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleting the datastore file [datastore2] 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.535837] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da41026f-b875-4b58-a3c8-2711bfc70c9b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.542815] env[69994]: DEBUG oslo_vmware.api [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1360.542815] env[69994]: value = "task-3242921" [ 1360.542815] env[69994]: _type = "Task" [ 1360.542815] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.552678] env[69994]: DEBUG oslo_vmware.api [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242921, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.608557] env[69994]: DEBUG nova.network.neutron [-] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.629878] env[69994]: DEBUG oslo_concurrency.lockutils [req-f2474ad0-c2fc-4627-b127-687a5e12011f req-278fc038-b5ae-41b2-9941-5e1dfbd64546 service nova] Releasing lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.630564] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2920afc-5717-493f-b0a6-4af546d0b671 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.639480] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6e8f77-ce2d-41c8-8467-0b216c516734 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.665953] env[69994]: DEBUG nova.compute.manager [req-7efe6e27-5f62-4250-a65c-939e676af2a0 req-d9c25937-56ad-4e22-b0c0-8ea03b27d616 service nova] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Detach interface failed, port_id=75af8d87-ecba-45ba-867a-8c8e9c0389c9, reason: Instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1360.800725] env[69994]: DEBUG nova.network.neutron [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Successfully updated port: 18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1360.812874] env[69994]: DEBUG oslo_vmware.api [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Task: {'id': task-3242919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315646} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.813579] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1360.813921] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1360.814411] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1360.814721] env[69994]: INFO nova.compute.manager [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1360.814992] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1360.815885] env[69994]: DEBUG nova.compute.manager [-] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1360.815987] env[69994]: DEBUG nova.network.neutron [-] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1361.019155] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1d44e3a9-8641-4dd7-97ac-521bc3cf9ec4 tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.818s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1361.019985] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.888s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.020196] env[69994]: INFO nova.compute.manager [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Unshelving [ 1361.056854] env[69994]: DEBUG oslo_vmware.api [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315703} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.058077] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.058315] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1361.058502] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1361.058673] env[69994]: INFO nova.compute.manager [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Took 1.13 seconds to destroy the instance on the hypervisor. 
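The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same invoke-then-poll pattern from oslo.vmware: the vSphere call returns a Task managed object immediately, and wait_for_task() polls it (the "progress is 0%." and "completed successfully" lines) until it finishes. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession and pre-resolved managed object references (the function name, the datastore path and the datacenter argument here are illustrative, not taken from this log):

    def destroy_vm_backing(session, vm_ref, dc_ref, ds_path):
        # PowerOffVM_Task returns a Task moref right away...
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # ...and wait_for_task() blocks, polling the task until it reports
        # success or raises on error/cancellation.
        session.wait_for_task(task)

        # UnregisterVM is a synchronous call, so there is no task to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Deleting the instance directory from the datastore is again an
        # asynchronous task, issued against the FileManager.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)

The same polling shows up for every long-running vCenter operation in this log (CreateVM_Task, SearchDatastore_Task and so on), which is why each task appears at least twice: once when it is submitted and again when _poll_task sees it complete.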
[ 1361.058908] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1361.060046] env[69994]: DEBUG nova.compute.manager [-] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1361.060195] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1361.064951] env[69994]: DEBUG nova.compute.manager [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-vif-plugged-18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1361.065233] env[69994]: DEBUG oslo_concurrency.lockutils [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] Acquiring lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.065772] env[69994]: DEBUG oslo_concurrency.lockutils [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.065957] env[69994]: DEBUG oslo_concurrency.lockutils [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1361.066215] env[69994]: DEBUG nova.compute.manager [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] No waiting events found dispatching network-vif-plugged-18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1361.066443] env[69994]: WARNING nova.compute.manager [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received unexpected event network-vif-plugged-18df8f55-9b6c-4093-a622-a8129ca51490 for instance with vm_state building and task_state spawning. 
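The WARNING just above is a race between Neutron and the compute manager: the network-vif-plugged event for port 18df8f55-9b6c-4093-a622-a8129ca51490 arrived while 4c949f32-d395-4bcb-b998-d2f4a7741d00 was still building and no waiter had been registered for it, so external_instance_event() dispatches it as unexpected instead of completing a pending wait. The "-events" locks in these entries are ordinary oslo.concurrency lockutils locks serializing access to the per-instance event registry, which is why each acquire/release is logged with waited/held timings. When a driver does want to block on these notifications, the usual shape is to register the expected events before plugging the VIFs, roughly as in the sketch below (a simplified illustration of ComputeVirtAPI.wait_for_instance_event, not the code path the vmwareapi driver takes in this run; the deadline value and callbacks are placeholders):

    def spawn_waiting_for_vifs(virtapi, instance, network_info, plug_and_boot):
        # One expected event per port that Neutron is going to plug.
        events = [('network-vif-plugged', vif['id']) for vif in network_info]

        def error_cb(event_name, inst):
            # Invoked if an expected event times out or reports an error.
            pass

        # Registering the waiters first means an event that arrives early is
        # matched against them instead of being logged as "unexpected".
        with virtapi.wait_for_instance_event(instance, events,
                                             deadline=300,
                                             error_callback=error_cb):
            plug_and_boot()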
[ 1361.066664] env[69994]: DEBUG nova.compute.manager [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1361.066891] env[69994]: DEBUG nova.compute.manager [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing instance network info cache due to event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1361.067147] env[69994]: DEBUG oslo_concurrency.lockutils [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.067350] env[69994]: DEBUG oslo_concurrency.lockutils [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1361.067564] env[69994]: DEBUG nova.network.neutron [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing network info cache for port 18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1361.111825] env[69994]: INFO nova.compute.manager [-] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Took 1.64 seconds to deallocate network for instance. [ 1361.304000] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.601680] env[69994]: DEBUG nova.network.neutron [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1361.618902] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.619199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.619398] env[69994]: DEBUG nova.objects.instance [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lazy-loading 'resources' on Instance uuid ef37ce64-2c26-4080-899a-6d9dbb5850c9 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1361.677307] env[69994]: DEBUG nova.network.neutron [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.785974] env[69994]: DEBUG nova.network.neutron [-] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.793112] env[69994]: DEBUG nova.network.neutron [-] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.046415] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.155803] env[69994]: DEBUG nova.compute.manager [req-f5bcb8ff-04c0-4f37-ab5f-258856434429 req-d13e28fd-46be-4c8f-a10f-b888a9b3c5c3 service nova] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Received event network-vif-deleted-3683f9a3-6187-49b8-85d9-0d4baaf1f783 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1362.179721] env[69994]: DEBUG oslo_concurrency.lockutils [req-67fbdaf3-ac74-42e6-bcaf-fc779952dcae req-f53ed6a4-f08e-40c8-9b4b-f2294416b20c service nova] Releasing lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1362.180182] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1362.180281] env[69994]: DEBUG nova.network.neutron [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1362.227356] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1a1d7c-9ed6-4c4f-8c02-5e21f9145ba6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.234882] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bd41e1-8423-4289-8cc0-8daff7a9a6cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.264053] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34168ef-cad6-4886-b09d-3ca1e9a5ecd2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.270623] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc344e5-710f-4cda-8fb8-92ba43345bda {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.285036] env[69994]: DEBUG nova.compute.provider_tree [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1362.289198] env[69994]: INFO nova.compute.manager [-] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Took 1.47 seconds to deallocate network for instance. [ 1362.294864] env[69994]: INFO nova.compute.manager [-] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Took 1.23 seconds to deallocate network for instance. [ 1362.711682] env[69994]: DEBUG nova.network.neutron [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1362.789224] env[69994]: DEBUG nova.scheduler.client.report [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1362.794557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.800467] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.836614] env[69994]: DEBUG nova.network.neutron [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.089906] env[69994]: DEBUG nova.compute.manager [req-3d6ab5d7-2aaf-4bc9-8dec-ce5268d5aeec req-41e76468-753b-4af1-8868-e3fddac08efa service nova] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Received event network-vif-deleted-521061f8-5fe8-473b-ba95-6d17064efb80 {{(pid=69994) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1363.293953] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.297252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.250s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.297561] env[69994]: DEBUG nova.objects.instance [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'pci_requests' on Instance uuid 71ee4730-f0e5-4c71-8053-be9e73b702a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.311395] env[69994]: INFO nova.scheduler.client.report [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Deleted allocations for instance ef37ce64-2c26-4080-899a-6d9dbb5850c9 [ 1363.339209] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1363.339209] env[69994]: DEBUG nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Instance network_info: |[{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1363.340073] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:fe:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18df8f55-9b6c-4093-a622-a8129ca51490', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1363.347806] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1363.348616] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1363.348790] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a7fc0a7-0d0b-4a59-8a8b-f7841971e7b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.369217] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1363.369217] env[69994]: value = "task-3242922" [ 1363.369217] env[69994]: _type = "Task" [ 1363.369217] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.376989] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242922, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.801408] env[69994]: DEBUG nova.objects.instance [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'numa_topology' on Instance uuid 71ee4730-f0e5-4c71-8053-be9e73b702a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.819433] env[69994]: DEBUG oslo_concurrency.lockutils [None req-68383558-986a-429a-8820-1cc389744252 tempest-ServerActionsTestOtherA-60618035 tempest-ServerActionsTestOtherA-60618035-project-member] Lock "ef37ce64-2c26-4080-899a-6d9dbb5850c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.593s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.879766] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242922, 'name': CreateVM_Task, 'duration_secs': 0.29232} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.879928] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1363.880592] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.880761] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1363.881091] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1363.881344] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f292a51-f534-4fa9-969d-5fa5b08e65cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.885642] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1363.885642] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5239a11a-9b1d-0862-aa8e-62a207c4de52" [ 1363.885642] env[69994]: _type = "Task" [ 1363.885642] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.892863] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5239a11a-9b1d-0862-aa8e-62a207c4de52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.304210] env[69994]: INFO nova.compute.claims [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1364.396047] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5239a11a-9b1d-0862-aa8e-62a207c4de52, 'name': SearchDatastore_Task, 'duration_secs': 0.010011} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.396348] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1364.396575] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1364.396805] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.396945] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.397130] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1364.397389] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff8dbd6e-366d-41a7-8471-b7c4bc551d5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.406339] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1364.406511] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1364.407230] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99007749-9e79-4601-88e5-7acc5eabf622 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.412457] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1364.412457] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5206308e-b665-0221-3266-f9809089e224" [ 1364.412457] env[69994]: _type = "Task" [ 1364.412457] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.424019] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5206308e-b665-0221-3266-f9809089e224, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.923098] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5206308e-b665-0221-3266-f9809089e224, 'name': SearchDatastore_Task, 'duration_secs': 0.009287} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.924027] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dce58ac8-d0cb-49b1-bb22-e7242d6c81bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.931017] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1364.931017] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52954402-35de-6551-b221-50149949e106" [ 1364.931017] env[69994]: _type = "Task" [ 1364.931017] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.937434] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52954402-35de-6551-b221-50149949e106, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.408529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a67d70f-5910-4d0e-aa34-e97430dc2ff2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.416112] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171f9198-bf39-4082-9282-46c7c99b2684 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.448216] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bcf0c7-e606-410e-8fa9-aeb4fcc74090 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.456131] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52954402-35de-6551-b221-50149949e106, 'name': SearchDatastore_Task, 'duration_secs': 0.009283} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.458231] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1365.458502] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 4c949f32-d395-4bcb-b998-d2f4a7741d00/4c949f32-d395-4bcb-b998-d2f4a7741d00.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1365.458782] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d12c15c0-7adb-4f6f-bace-d5284c239734 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.461445] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfc5fbf-78cf-468a-bcf4-1df5504de9da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.478537] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1365.478537] env[69994]: value = "task-3242923" [ 1365.478537] env[69994]: _type = "Task" [ 1365.478537] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.479076] env[69994]: DEBUG nova.compute.provider_tree [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.487771] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242923, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.985704] env[69994]: DEBUG nova.scheduler.client.report [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1365.994220] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242923, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441203} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.994516] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 4c949f32-d395-4bcb-b998-d2f4a7741d00/4c949f32-d395-4bcb-b998-d2f4a7741d00.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1365.994736] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1365.994963] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cf0afbc-b9d9-4590-aca8-5f13f5d8440b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.002594] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1366.002594] env[69994]: value = "task-3242924" [ 1366.002594] env[69994]: _type = "Task" [ 1366.002594] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.010626] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242924, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.490501] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.194s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.492710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.698s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.492943] env[69994]: DEBUG nova.objects.instance [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lazy-loading 'resources' on Instance uuid 7e92935f-fc1f-4893-8f69-4b97e4729a7f {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1366.511885] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057787} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.512150] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1366.512905] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bad4f52-85c0-428c-be51-fd7bb8ff8dd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.534340] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 4c949f32-d395-4bcb-b998-d2f4a7741d00/4c949f32-d395-4bcb-b998-d2f4a7741d00.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1366.535189] env[69994]: INFO nova.network.neutron [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating port 06234607-a0e8-40a9-8a07-6f4502407064 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1366.537107] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a466bf09-876e-42af-a44c-212f134f20f5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1366.556208] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1366.556208] env[69994]: value = "task-3242925" [ 1366.556208] env[69994]: _type = "Task" [ 1366.556208] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.563490] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242925, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.064970] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242925, 'name': ReconfigVM_Task, 'duration_secs': 0.313268} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.067190] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 4c949f32-d395-4bcb-b998-d2f4a7741d00/4c949f32-d395-4bcb-b998-d2f4a7741d00.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1367.067960] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8c9263a-a344-4d2d-8c77-8ad4e7cb39f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.073986] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1367.073986] env[69994]: value = "task-3242926" [ 1367.073986] env[69994]: _type = "Task" [ 1367.073986] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.083291] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242926, 'name': Rename_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.086953] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525debcf-9d88-458f-9eed-8c8efb18f9dc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.093204] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c724f37-0a82-43f5-a047-d0ad1328dc9c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.124521] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d2a558-3e9b-4e7c-a931-818411c12665 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.131245] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421f5d1e-9255-4775-822b-31ff4356e542 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.143874] env[69994]: DEBUG nova.compute.provider_tree [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.584316] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242926, 'name': Rename_Task, 'duration_secs': 0.142677} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.584701] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1367.584948] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d97192f6-9096-419c-a0e4-5ce059f08586 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.592424] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1367.592424] env[69994]: value = "task-3242927" [ 1367.592424] env[69994]: _type = "Task" [ 1367.592424] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.599739] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242927, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.647222] env[69994]: DEBUG nova.scheduler.client.report [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1367.914173] env[69994]: DEBUG nova.compute.manager [req-95614e3d-0edf-411f-9e6c-82e4c2abc22f req-9a064bfa-7a72-4032-bec8-633a01205827 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-vif-plugged-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1367.914381] env[69994]: DEBUG oslo_concurrency.lockutils [req-95614e3d-0edf-411f-9e6c-82e4c2abc22f req-9a064bfa-7a72-4032-bec8-633a01205827 service nova] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1367.914733] env[69994]: DEBUG oslo_concurrency.lockutils [req-95614e3d-0edf-411f-9e6c-82e4c2abc22f req-9a064bfa-7a72-4032-bec8-633a01205827 service nova] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.915316] env[69994]: DEBUG oslo_concurrency.lockutils [req-95614e3d-0edf-411f-9e6c-82e4c2abc22f req-9a064bfa-7a72-4032-bec8-633a01205827 service nova] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.915673] env[69994]: DEBUG nova.compute.manager [req-95614e3d-0edf-411f-9e6c-82e4c2abc22f req-9a064bfa-7a72-4032-bec8-633a01205827 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] No waiting events found dispatching network-vif-plugged-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1367.915964] env[69994]: WARNING nova.compute.manager [req-95614e3d-0edf-411f-9e6c-82e4c2abc22f req-9a064bfa-7a72-4032-bec8-633a01205827 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received unexpected event network-vif-plugged-06234607-a0e8-40a9-8a07-6f4502407064 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1367.998287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.998287] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.998287] env[69994]: DEBUG nova.network.neutron [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1368.103050] env[69994]: DEBUG oslo_vmware.api [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242927, 'name': PowerOnVM_Task, 'duration_secs': 0.413129} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.103050] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1368.103050] env[69994]: INFO nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Took 7.77 seconds to spawn the instance on the hypervisor. 
[ 1368.103050] env[69994]: DEBUG nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1368.103706] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa8f8eb-5a25-466f-b7dc-63a36a6cf4b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.151767] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.154724] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.354s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1368.155152] env[69994]: DEBUG nova.objects.instance [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'resources' on Instance uuid 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.169144] env[69994]: INFO nova.scheduler.client.report [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Deleted allocations for instance 7e92935f-fc1f-4893-8f69-4b97e4729a7f [ 1368.619700] env[69994]: INFO nova.compute.manager [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Took 12.55 seconds to build instance. 
[ 1368.676322] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b53fddce-2e86-4d52-b9aa-45e367b8a313 tempest-ServerActionsTestJSON-701397409 tempest-ServerActionsTestJSON-701397409-project-member] Lock "7e92935f-fc1f-4893-8f69-4b97e4729a7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.484s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.734139] env[69994]: DEBUG nova.network.neutron [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06234607-a0", "ovs_interfaceid": "06234607-a0e8-40a9-8a07-6f4502407064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.749421] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b01a477-871f-4646-b4ac-9bebdd32de71 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.758567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc584e6-b1b1-410e-b158-ecff25fd524c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.791458] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc396c7-0831-4045-8548-6a0feb2b49d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.802107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2970f303-8f36-4ff4-ab3c-b174bf70705c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.814903] env[69994]: DEBUG nova.compute.provider_tree [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1369.121852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-6f02367f-28ad-4652-a67c-468863f96125 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.067s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1369.239116] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1369.268533] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ee8adf5f2bdcc53fbd14f1b832c694ee',container_format='bare',created_at=2025-04-03T08:51:16Z,direct_url=,disk_format='vmdk',id=514afc33-9ab7-4777-a973-8c846c0ae8e9,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-131965547-shelved',owner='352ad5b68db1480eb657935e006d7dbb',properties=ImageMetaProps,protected=,size=31664128,status='active',tags=,updated_at=2025-04-03T08:51:31Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1369.268842] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.269053] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1369.269310] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.269495] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1369.269704] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1369.269923] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1369.270133] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1369.270346] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1369.270546] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1369.270757] env[69994]: DEBUG nova.virt.hardware [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1369.271691] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0cb627-f735-4e6b-83cb-4079d4556e17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.280631] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c12969-d38f-4c78-82cc-67416eb92433 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.296264] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:01:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24ec44b7-0acf-4ff9-8bb3-4641b74af7a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06234607-a0e8-40a9-8a07-6f4502407064', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.303974] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1369.304227] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1369.304564] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d68ebefd-a4eb-4c21-83c1-37f516a89591 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.320248] env[69994]: DEBUG nova.scheduler.client.report [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1369.330199] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.330199] env[69994]: value = "task-3242928" [ 1369.330199] env[69994]: _type = "Task" [ 1369.330199] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.340624] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242928, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.555324] env[69994]: DEBUG nova.compute.manager [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1369.556255] env[69994]: DEBUG nova.compute.manager [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing instance network info cache due to event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1369.556571] env[69994]: DEBUG oslo_concurrency.lockutils [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.556777] env[69994]: DEBUG oslo_concurrency.lockutils [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.557261] env[69994]: DEBUG nova.network.neutron [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.828359] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1369.840610] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242928, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.862027] env[69994]: INFO nova.scheduler.client.report [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted allocations for instance 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3 [ 1369.947780] env[69994]: DEBUG nova.compute.manager [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-changed-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1369.947780] env[69994]: DEBUG nova.compute.manager [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing instance network info cache due to event network-changed-06234607-a0e8-40a9-8a07-6f4502407064. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1369.947780] env[69994]: DEBUG oslo_concurrency.lockutils [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] Acquiring lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.947780] env[69994]: DEBUG oslo_concurrency.lockutils [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] Acquired lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.947780] env[69994]: DEBUG nova.network.neutron [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Refreshing network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1370.340768] env[69994]: DEBUG nova.network.neutron [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updated VIF entry in instance network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1370.341219] env[69994]: DEBUG nova.network.neutron [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.347371] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242928, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.375358] env[69994]: DEBUG oslo_concurrency.lockutils [None req-5811907a-226a-41c2-bea4-b7fe8da219b2 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "3c433e3b-4c16-4cfc-a7d5-40e40b0906b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.952s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.766577] env[69994]: DEBUG nova.network.neutron [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updated VIF entry in instance network info cache for port 06234607-a0e8-40a9-8a07-6f4502407064. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1370.767017] env[69994]: DEBUG nova.network.neutron [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [{"id": "06234607-a0e8-40a9-8a07-6f4502407064", "address": "fa:16:3e:4a:01:cb", "network": {"id": "24596b1b-4e9c-466d-85f9-ff79760278bd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1747505141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "352ad5b68db1480eb657935e006d7dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24ec44b7-0acf-4ff9-8bb3-4641b74af7a7", "external-id": "nsx-vlan-transportzone-705", "segmentation_id": 705, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06234607-a0", "ovs_interfaceid": "06234607-a0e8-40a9-8a07-6f4502407064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.840636] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242928, 'name': CreateVM_Task, 'duration_secs': 1.077603} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.840807] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1370.842095] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.842277] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1370.842692] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1370.843024] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f526274a-90e0-4c5f-8a4f-1efa4f074efe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.847603] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1370.847603] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b5333a-38d1-a852-455b-b3d78c59a298" [ 1370.847603] env[69994]: _type = "Task" [ 1370.847603] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.848091] env[69994]: DEBUG oslo_concurrency.lockutils [req-52ab9877-cc26-4481-8b4c-93572bdcbdab req-1a239a6a-bd58-454d-902d-bc0b3cf0bb8e service nova] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.855593] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b5333a-38d1-a852-455b-b3d78c59a298, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.269890] env[69994]: DEBUG oslo_concurrency.lockutils [req-d327d76a-169b-4bad-8569-aed2faf1f269 req-48a756ea-03a6-47e2-b1d0-abc92aab2105 service nova] Releasing lock "refresh_cache-71ee4730-f0e5-4c71-8053-be9e73b702a4" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1371.358934] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1371.359312] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Processing image 514afc33-9ab7-4777-a973-8c846c0ae8e9 {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1371.359365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9/514afc33-9ab7-4777-a973-8c846c0ae8e9.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.359516] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquired lock "[datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9/514afc33-9ab7-4777-a973-8c846c0ae8e9.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1371.359693] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1371.359924] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1f6d5c7-f97c-4584-be98-effb72c1b9fa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.370787] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1371.370954] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1371.371621] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6dde170-4863-44a6-ad92-8a47034d6cd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.376580] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1371.376580] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]528e11c0-c711-d1c5-4d27-f8a8f8d48081" [ 1371.376580] env[69994]: _type = "Task" [ 1371.376580] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.385836] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]528e11c0-c711-d1c5-4d27-f8a8f8d48081, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.508586] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "4f002725-3f15-4d10-a7ee-07132faf6266" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1371.508878] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "4f002725-3f15-4d10-a7ee-07132faf6266" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1371.509107] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "4f002725-3f15-4d10-a7ee-07132faf6266-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1371.509291] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "4f002725-3f15-4d10-a7ee-07132faf6266-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1371.509461] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "4f002725-3f15-4d10-a7ee-07132faf6266-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69994) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.512671] env[69994]: INFO nova.compute.manager [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Terminating instance [ 1371.588648] env[69994]: DEBUG nova.compute.manager [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1371.588801] env[69994]: DEBUG nova.compute.manager [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing instance network info cache due to event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1371.589250] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.589374] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1371.589532] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing network info cache for port 18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.887253] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Preparing fetch location {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1371.887498] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Fetch image to [datastore2] OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec/OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec.vmdk {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1371.887688] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Downloading stream optimized image 514afc33-9ab7-4777-a973-8c846c0ae8e9 to [datastore2] OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec/OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec.vmdk on the data store datastore2 as vApp {{(pid=69994) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1371.887861] 
env[69994]: DEBUG nova.virt.vmwareapi.images [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Downloading image file data 514afc33-9ab7-4777-a973-8c846c0ae8e9 to the ESX as VM named 'OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec' {{(pid=69994) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1371.962077] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1371.962077] env[69994]: value = "resgroup-9" [ 1371.962077] env[69994]: _type = "ResourcePool" [ 1371.962077] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1371.962421] env[69994]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b8418e3b-547a-4959-aa14-a0888b022f41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.983368] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease: (returnval){ [ 1371.983368] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a2b56d-165f-0125-45f6-0ed5a06da739" [ 1371.983368] env[69994]: _type = "HttpNfcLease" [ 1371.983368] env[69994]: } obtained for vApp import into resource pool (val){ [ 1371.983368] env[69994]: value = "resgroup-9" [ 1371.983368] env[69994]: _type = "ResourcePool" [ 1371.983368] env[69994]: }. {{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1371.983671] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the lease: (returnval){ [ 1371.983671] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a2b56d-165f-0125-45f6-0ed5a06da739" [ 1371.983671] env[69994]: _type = "HttpNfcLease" [ 1371.983671] env[69994]: } to be ready. {{(pid=69994) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1371.989464] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1371.989464] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a2b56d-165f-0125-45f6-0ed5a06da739" [ 1371.989464] env[69994]: _type = "HttpNfcLease" [ 1371.989464] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1372.016564] env[69994]: DEBUG nova.compute.manager [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1372.016780] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1372.017674] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07596915-4a41-4695-95df-f88c363e2ea8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.024876] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1372.025115] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6de0f933-e4dd-4159-952f-4aee9dbd3141 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.030446] env[69994]: DEBUG oslo_vmware.api [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1372.030446] env[69994]: value = "task-3242930" [ 1372.030446] env[69994]: _type = "Task" [ 1372.030446] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.037808] env[69994]: DEBUG oslo_vmware.api [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.301814] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updated VIF entry in instance network info cache for port 18df8f55-9b6c-4093-a622-a8129ca51490. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1372.301814] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.492203] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1372.492203] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a2b56d-165f-0125-45f6-0ed5a06da739" [ 1372.492203] env[69994]: _type = "HttpNfcLease" [ 1372.492203] env[69994]: } is initializing. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1372.539668] env[69994]: DEBUG oslo_vmware.api [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242930, 'name': PowerOffVM_Task, 'duration_secs': 0.201323} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.540777] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1372.540777] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1372.540777] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66d0040a-abcc-4c43-b600-ef9e2b68b086 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.606881] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1372.606881] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1372.606881] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleting the datastore file [datastore2] 4f002725-3f15-4d10-a7ee-07132faf6266 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1372.607193] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d763372-6ece-4ebf-85d1-51cd790bee8f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.612764] env[69994]: DEBUG oslo_vmware.api [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1372.612764] env[69994]: value = "task-3242932" [ 1372.612764] env[69994]: _type = "Task" [ 1372.612764] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.622694] env[69994]: DEBUG oslo_vmware.api [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.804766] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Releasing lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1372.805052] env[69994]: DEBUG nova.compute.manager [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1372.805230] env[69994]: DEBUG nova.compute.manager [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing instance network info cache due to event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1372.805449] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.805605] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1372.805763] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing network info cache for port 18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.993126] env[69994]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1372.993126] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a2b56d-165f-0125-45f6-0ed5a06da739" [ 1372.993126] env[69994]: _type = "HttpNfcLease" [ 1372.993126] env[69994]: } is ready. {{(pid=69994) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1372.993442] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1372.993442] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52a2b56d-165f-0125-45f6-0ed5a06da739" [ 1372.993442] env[69994]: _type = "HttpNfcLease" [ 1372.993442] env[69994]: }. 
{{(pid=69994) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1372.994152] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41a45b0-6b15-4400-82ea-88ad54dc447a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.001370] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cde1ac-5a41-b7f9-4bce-e5336c4ffebd/disk-0.vmdk from lease info. {{(pid=69994) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1373.001550] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating HTTP connection to write to file with size = 31664128 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cde1ac-5a41-b7f9-4bce-e5336c4ffebd/disk-0.vmdk. {{(pid=69994) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1373.066029] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-171bde41-de58-4b3a-ae4b-7b7fc1a2cc64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.122122] env[69994]: DEBUG oslo_vmware.api [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160567} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.123124] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1373.123322] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1373.123503] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1373.123687] env[69994]: INFO nova.compute.manager [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1373.123922] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1373.124121] env[69994]: DEBUG nova.compute.manager [-] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1373.124222] env[69994]: DEBUG nova.network.neutron [-] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1373.415092] env[69994]: DEBUG nova.compute.manager [req-e69f5a96-9c17-4de0-82d9-dd5f5972560b req-3ff19f2d-94bb-4e8f-abc8-ff4e30a095b0 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Received event network-vif-deleted-f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1373.415412] env[69994]: INFO nova.compute.manager [req-e69f5a96-9c17-4de0-82d9-dd5f5972560b req-3ff19f2d-94bb-4e8f-abc8-ff4e30a095b0 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Neutron deleted interface f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f; detaching it from the instance and deleting it from the info cache [ 1373.415733] env[69994]: DEBUG nova.network.neutron [req-e69f5a96-9c17-4de0-82d9-dd5f5972560b req-3ff19f2d-94bb-4e8f-abc8-ff4e30a095b0 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.558407] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updated VIF entry in instance network info cache for port 18df8f55-9b6c-4093-a622-a8129ca51490. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.558763] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.899626] env[69994]: DEBUG nova.network.neutron [-] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.919253] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf32402e-232e-4256-9e82-455dff2d50d3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.929185] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb50de0-f265-467a-ac88-60cfefb8972e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.959650] env[69994]: DEBUG nova.compute.manager [req-e69f5a96-9c17-4de0-82d9-dd5f5972560b req-3ff19f2d-94bb-4e8f-abc8-ff4e30a095b0 service nova] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Detach interface failed, port_id=f2ed70aa-9d71-4b1f-9f7a-805aae4daa5f, reason: Instance 4f002725-3f15-4d10-a7ee-07132faf6266 could not be found. 
{{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1374.061281] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Releasing lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1374.061538] env[69994]: DEBUG nova.compute.manager [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1374.061710] env[69994]: DEBUG nova.compute.manager [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing instance network info cache due to event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1374.061922] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.062086] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1374.062255] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.351198] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Completed reading data from the image iterator. {{(pid=69994) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1374.351436] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cde1ac-5a41-b7f9-4bce-e5336c4ffebd/disk-0.vmdk. 
{{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1374.352367] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7971c7a4-2989-48c3-bae5-5945b326f257 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.359557] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cde1ac-5a41-b7f9-4bce-e5336c4ffebd/disk-0.vmdk is in state: ready. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1374.359802] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cde1ac-5a41-b7f9-4bce-e5336c4ffebd/disk-0.vmdk. {{(pid=69994) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1374.360060] env[69994]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-11cfc7cd-f5e3-4e49-a91c-58d718d875b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.402486] env[69994]: INFO nova.compute.manager [-] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Took 1.28 seconds to deallocate network for instance. [ 1374.767791] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updated VIF entry in instance network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1374.768173] env[69994]: DEBUG nova.network.neutron [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.909199] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.909446] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.909624] env[69994]: DEBUG nova.objects.instance [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'resources' on Instance uuid 4f002725-3f15-4d10-a7ee-07132faf6266 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1375.055565] env[69994]: DEBUG oslo_vmware.rw_handles [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cde1ac-5a41-b7f9-4bce-e5336c4ffebd/disk-0.vmdk. 
{{(pid=69994) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1375.055797] env[69994]: INFO nova.virt.vmwareapi.images [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Downloaded image file data 514afc33-9ab7-4777-a973-8c846c0ae8e9 [ 1375.056679] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8f0b63-5829-4667-88d3-e4dccddd5c0c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.072100] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c63250e2-2844-4ba2-a7d9-f8a4ffbff3dd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.240178] env[69994]: INFO nova.virt.vmwareapi.images [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] The imported VM was unregistered [ 1375.243042] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Caching image {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1375.243311] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Creating directory with path [datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1375.243612] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc633668-055a-4717-8c94-bdb973caf7aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.254077] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Created directory with path [datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9 {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1375.254280] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec/OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec.vmdk to [datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9/514afc33-9ab7-4777-a973-8c846c0ae8e9.vmdk. 
{{(pid=69994) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1375.254558] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-4b143578-bd14-4dcf-9f4b-afe7c41e7d24 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.262242] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1375.262242] env[69994]: value = "task-3242934" [ 1375.262242] env[69994]: _type = "Task" [ 1375.262242] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.269531] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242934, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.271073] env[69994]: DEBUG oslo_concurrency.lockutils [req-d0e39239-48a3-4ee8-8735-fe70895308f0 req-0095ad23-bba9-408d-aff8-1a45bb64ea34 service nova] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1375.509793] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbba09b-abad-4819-ad4c-db530678763a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.520552] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5032a6-61cb-4523-a242-1b4ae55647da {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.552256] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7741e1d4-7fee-4443-b830-2379eb15d2cb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.559935] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0190b6-a6f3-43ba-a381-1e0d6289a23b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.576428] env[69994]: DEBUG nova.compute.provider_tree [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.774388] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242934, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.079789] env[69994]: DEBUG nova.scheduler.client.report [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1376.273867] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242934, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.585026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1376.604772] env[69994]: INFO nova.scheduler.client.report [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted allocations for instance 4f002725-3f15-4d10-a7ee-07132faf6266 [ 1376.776088] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242934, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.113046] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2ec7dd50-13f0-4657-a57c-3b908a273fc9 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "4f002725-3f15-4d10-a7ee-07132faf6266" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.604s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1377.277144] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242934, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.776153] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242934, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.333148} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.776489] env[69994]: INFO nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec/OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec.vmdk to [datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9/514afc33-9ab7-4777-a973-8c846c0ae8e9.vmdk. [ 1377.776612] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Cleaning up location [datastore2] OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1377.776776] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7caefb45-4c7b-4faf-a3cc-9040eda86bec {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1377.777038] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eddcba4e-46c0-4746-9cd2-4a79155cca3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.783625] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1377.783625] env[69994]: value = "task-3242935" [ 1377.783625] env[69994]: _type = "Task" [ 1377.783625] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.791161] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.292970] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134119} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.293377] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1378.293561] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Releasing lock "[datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9/514afc33-9ab7-4777-a973-8c846c0ae8e9.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1378.293843] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9/514afc33-9ab7-4777-a973-8c846c0ae8e9.vmdk to [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1378.294168] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14efac14-4418-49ee-b3d8-41c1705eb112 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.300655] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1378.300655] env[69994]: value = "task-3242936" [ 1378.300655] env[69994]: _type = "Task" [ 1378.300655] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.308032] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242936, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.554569] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1378.554863] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1378.813083] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242936, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.058039] env[69994]: DEBUG nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1379.314377] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242936, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.580150] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.580454] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.581992] env[69994]: INFO nova.compute.claims [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1379.814692] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242936, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.316120] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242936, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.607460] env[69994]: DEBUG nova.scheduler.client.report [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1380.621120] env[69994]: DEBUG nova.scheduler.client.report [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1380.621354] env[69994]: DEBUG nova.compute.provider_tree [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.632389] env[69994]: DEBUG nova.scheduler.client.report [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1380.650501] env[69994]: DEBUG nova.scheduler.client.report [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1380.718774] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ede0b69-8a01-49bd-9741-0f87bd0983ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.726650] env[69994]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984252d3-ece4-4c63-b41d-d59a89b8c3d6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.757114] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b65284e-b733-4207-be1e-b70ce4aea074 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.764641] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80db1ae3-cb86-453b-9d8b-94448bed01e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.779309] env[69994]: DEBUG nova.compute.provider_tree [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1380.813985] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242936, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.255196} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.814269] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/514afc33-9ab7-4777-a973-8c846c0ae8e9/514afc33-9ab7-4777-a973-8c846c0ae8e9.vmdk to [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1380.815111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1d5278-71da-44be-8a83-f77d7e7b2edd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.837899] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1380.838152] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac22f7af-9844-436a-8dd6-d2c069706572 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.857356] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1380.857356] env[69994]: value = "task-3242937" [ 1380.857356] env[69994]: _type = "Task" [ 1380.857356] env[69994]: } to 
complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.865546] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.282759] env[69994]: DEBUG nova.scheduler.client.report [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.367113] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242937, 'name': ReconfigVM_Task, 'duration_secs': 0.323399} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.367373] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4/71ee4730-f0e5-4c71-8053-be9e73b702a4.vmdk or device None with type streamOptimized {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1381.367974] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffa125d3-4040-4770-9c9b-c0f9f03be46f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.374381] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1381.374381] env[69994]: value = "task-3242938" [ 1381.374381] env[69994]: _type = "Task" [ 1381.374381] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.381566] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242938, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.787252] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.787803] env[69994]: DEBUG nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1381.883889] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242938, 'name': Rename_Task, 'duration_secs': 0.150552} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.884409] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1381.884670] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-890bc580-9eb8-457a-9dab-a8ae8243bcf7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.890712] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1381.890712] env[69994]: value = "task-3242939" [ 1381.890712] env[69994]: _type = "Task" [ 1381.890712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.897998] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242939, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.292324] env[69994]: DEBUG nova.compute.utils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1382.294105] env[69994]: DEBUG nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1382.294348] env[69994]: DEBUG nova.network.neutron [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1382.341012] env[69994]: DEBUG nova.policy [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '432370da6f1840db8f93b613ca52e31d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42ee300d6f33459da1deb82b1b14cf74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1382.400591] env[69994]: DEBUG oslo_vmware.api [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242939, 'name': PowerOnVM_Task, 'duration_secs': 0.481471} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.400859] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1382.497118] env[69994]: DEBUG nova.compute.manager [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1382.498045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f877c7-c160-49da-99fb-a7c699dd4e81 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.566358] env[69994]: DEBUG nova.network.neutron [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Successfully created port: 47b2296d-18a2-4e7f-a528-9b8992bf7d1c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1382.797850] env[69994]: DEBUG nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1383.015893] env[69994]: DEBUG oslo_concurrency.lockutils [None req-23c7a844-e0ad-4e70-aad6-fae93c9b942f tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.996s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.810941] env[69994]: DEBUG nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1383.838171] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1383.838540] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.838780] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1383.839077] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.839311] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1383.839536] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1383.839840] env[69994]: DEBUG nova.virt.hardware 
[None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1383.840092] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1383.840350] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1383.840596] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1383.840860] env[69994]: DEBUG nova.virt.hardware [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1383.842077] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cc8f55-31ff-4056-a057-72beb35cc87e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.853234] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d14f61-e566-44d4-b5e2-a7fa4716d94c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.003165] env[69994]: DEBUG nova.compute.manager [req-aacbf108-a508-4846-a35a-618d05eac16c req-fd3cda86-3d30-49c0-94e4-bdf23b9054f2 service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Received event network-vif-plugged-47b2296d-18a2-4e7f-a528-9b8992bf7d1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1384.003165] env[69994]: DEBUG oslo_concurrency.lockutils [req-aacbf108-a508-4846-a35a-618d05eac16c req-fd3cda86-3d30-49c0-94e4-bdf23b9054f2 service nova] Acquiring lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.003165] env[69994]: DEBUG oslo_concurrency.lockutils [req-aacbf108-a508-4846-a35a-618d05eac16c req-fd3cda86-3d30-49c0-94e4-bdf23b9054f2 service nova] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.003165] env[69994]: DEBUG oslo_concurrency.lockutils [req-aacbf108-a508-4846-a35a-618d05eac16c req-fd3cda86-3d30-49c0-94e4-bdf23b9054f2 service nova] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.003793] env[69994]: DEBUG nova.compute.manager [req-aacbf108-a508-4846-a35a-618d05eac16c req-fd3cda86-3d30-49c0-94e4-bdf23b9054f2 service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] No waiting events found dispatching network-vif-plugged-47b2296d-18a2-4e7f-a528-9b8992bf7d1c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1384.004640] env[69994]: WARNING nova.compute.manager [req-aacbf108-a508-4846-a35a-618d05eac16c req-fd3cda86-3d30-49c0-94e4-bdf23b9054f2 service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Received unexpected event network-vif-plugged-47b2296d-18a2-4e7f-a528-9b8992bf7d1c for instance with vm_state building and task_state spawning. [ 1384.076508] env[69994]: DEBUG nova.network.neutron [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Successfully updated port: 47b2296d-18a2-4e7f-a528-9b8992bf7d1c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1384.578361] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "refresh_cache-12a3162f-161e-4dfb-abd6-c77ce9e1785e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.578497] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "refresh_cache-12a3162f-161e-4dfb-abd6-c77ce9e1785e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1384.578570] env[69994]: DEBUG nova.network.neutron [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.111430] env[69994]: DEBUG nova.network.neutron [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1385.266967] env[69994]: DEBUG nova.network.neutron [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Updating instance_info_cache with network_info: [{"id": "47b2296d-18a2-4e7f-a528-9b8992bf7d1c", "address": "fa:16:3e:22:05:14", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b2296d-18", "ovs_interfaceid": "47b2296d-18a2-4e7f-a528-9b8992bf7d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.770154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "refresh_cache-12a3162f-161e-4dfb-abd6-c77ce9e1785e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1385.770496] env[69994]: DEBUG nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Instance network_info: |[{"id": "47b2296d-18a2-4e7f-a528-9b8992bf7d1c", "address": "fa:16:3e:22:05:14", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b2296d-18", "ovs_interfaceid": "47b2296d-18a2-4e7f-a528-9b8992bf7d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1385.770914] env[69994]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:05:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089ef678-58b4-4bf0-a39d-b94b2d364291', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47b2296d-18a2-4e7f-a528-9b8992bf7d1c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1385.778592] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1385.778787] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1385.779011] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc8d72a6-ef7c-4e93-b65b-8c1760ce1c64 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.799646] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1385.799646] env[69994]: value = "task-3242940" [ 1385.799646] env[69994]: _type = "Task" [ 1385.799646] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.807419] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242940, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.034662] env[69994]: DEBUG nova.compute.manager [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Received event network-changed-47b2296d-18a2-4e7f-a528-9b8992bf7d1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1386.035128] env[69994]: DEBUG nova.compute.manager [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Refreshing instance network info cache due to event network-changed-47b2296d-18a2-4e7f-a528-9b8992bf7d1c. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1386.035128] env[69994]: DEBUG oslo_concurrency.lockutils [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] Acquiring lock "refresh_cache-12a3162f-161e-4dfb-abd6-c77ce9e1785e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.035128] env[69994]: DEBUG oslo_concurrency.lockutils [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] Acquired lock "refresh_cache-12a3162f-161e-4dfb-abd6-c77ce9e1785e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1386.035480] env[69994]: DEBUG nova.network.neutron [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Refreshing network info cache for port 47b2296d-18a2-4e7f-a528-9b8992bf7d1c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1386.145465] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.145880] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.145954] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1386.310154] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242940, 'name': CreateVM_Task, 'duration_secs': 0.339464} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.310380] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1386.310935] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.311117] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1386.311434] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1386.311678] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdb89e37-573a-457c-b221-d39ca63c5bec {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.315888] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1386.315888] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]523580d0-d4c5-8a12-9624-38be9a107be8" [ 1386.315888] env[69994]: _type = "Task" [ 1386.315888] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.324622] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523580d0-d4c5-8a12-9624-38be9a107be8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.748225] env[69994]: DEBUG nova.network.neutron [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Updated VIF entry in instance network info cache for port 47b2296d-18a2-4e7f-a528-9b8992bf7d1c. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1386.748648] env[69994]: DEBUG nova.network.neutron [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Updating instance_info_cache with network_info: [{"id": "47b2296d-18a2-4e7f-a528-9b8992bf7d1c", "address": "fa:16:3e:22:05:14", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b2296d-18", "ovs_interfaceid": "47b2296d-18a2-4e7f-a528-9b8992bf7d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.827232] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]523580d0-d4c5-8a12-9624-38be9a107be8, 'name': SearchDatastore_Task, 'duration_secs': 0.015119} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.827478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1386.827711] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1386.827941] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.828101] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1386.828278] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1386.828532] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abe8044a-8376-44e2-875d-d2e93afe820a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.836460] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.836628] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1386.837308] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0f5906b-4e1f-47e0-82da-e71e1fde50a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.842161] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1386.842161] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52f3db41-ed38-56b6-c9e4-168a59eadcef" [ 1386.842161] env[69994]: _type = "Task" [ 1386.842161] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.850617] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f3db41-ed38-56b6-c9e4-168a59eadcef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.251507] env[69994]: DEBUG oslo_concurrency.lockutils [req-0f893cbf-4570-47b0-873f-1da0dfc3ebee req-b2252b68-f2b1-4808-b94c-ebe5cb2b50cb service nova] Releasing lock "refresh_cache-12a3162f-161e-4dfb-abd6-c77ce9e1785e" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1387.352785] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52f3db41-ed38-56b6-c9e4-168a59eadcef, 'name': SearchDatastore_Task, 'duration_secs': 0.009175} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.353584] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a447c2f-5467-41ae-abc2-bf38e9ea4673 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.358653] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1387.358653] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52b7b3d4-1ba5-f2aa-dd8f-418173b7d7db" [ 1387.358653] env[69994]: _type = "Task" [ 1387.358653] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.366430] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b7b3d4-1ba5-f2aa-dd8f-418173b7d7db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.869464] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52b7b3d4-1ba5-f2aa-dd8f-418173b7d7db, 'name': SearchDatastore_Task, 'duration_secs': 0.008688} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.869717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1387.869973] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 12a3162f-161e-4dfb-abd6-c77ce9e1785e/12a3162f-161e-4dfb-abd6-c77ce9e1785e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1387.870241] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe9e1dba-d9d1-4948-840a-15d543ba2cd4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.876496] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1387.876496] env[69994]: value = "task-3242941" [ 1387.876496] env[69994]: _type = "Task" [ 1387.876496] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.883536] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.141551] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.386691] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468167} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.387145] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] 12a3162f-161e-4dfb-abd6-c77ce9e1785e/12a3162f-161e-4dfb-abd6-c77ce9e1785e.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1388.387145] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1388.387349] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2523b2f2-5397-4c36-aa17-79c10d39c4c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.393216] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1388.393216] env[69994]: value = "task-3242942" [ 1388.393216] env[69994]: _type = "Task" [ 1388.393216] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.400259] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.647232] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.647399] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.647538] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.902145] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064336} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.902416] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1388.903170] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983cb668-a899-4c21-a10e-7268680d5541 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.924081] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 12a3162f-161e-4dfb-abd6-c77ce9e1785e/12a3162f-161e-4dfb-abd6-c77ce9e1785e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1388.924610] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0f1fb0e-90cd-412e-b5e8-92840dac8ebb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.945712] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1388.945712] env[69994]: value = "task-3242943" [ 1388.945712] env[69994]: _type = "Task" [ 1388.945712] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.954766] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242943, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.151160] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.151439] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.151613] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.151770] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1389.152696] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a6d7bf-5b95-44e3-b047-3883a665288e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.160329] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e30afd-2089-44f0-8dd6-db2882960b5a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.173925] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8a99cd-e12c-47b9-ba76-eecef0451316 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.180285] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ad1208-d847-4620-b8e3-47896ee5ac20 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.209343] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179773MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1389.209541] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.209701] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.456064] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242943, 'name': ReconfigVM_Task, 'duration_secs': 0.281256} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.456457] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 12a3162f-161e-4dfb-abd6-c77ce9e1785e/12a3162f-161e-4dfb-abd6-c77ce9e1785e.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1389.456956] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-819633c5-c282-4a5e-958a-2bd42ea62244 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.463109] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1389.463109] env[69994]: value = "task-3242944" [ 1389.463109] env[69994]: _type = "Task" [ 1389.463109] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.471017] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242944, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.973118] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242944, 'name': Rename_Task, 'duration_secs': 0.133335} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.973395] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1389.973633] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15f266d6-dddc-4376-8d8f-ff926e513555 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.983017] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1389.983017] env[69994]: value = "task-3242945" [ 1389.983017] env[69994]: _type = "Task" [ 1389.983017] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.992683] env[69994]: DEBUG oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242945, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.235846] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 73288b0c-7e85-48cd-9ea1-d08a31a81c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.236010] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 902200b2-f2ca-4979-961a-ec046d22d05c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.236149] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 4c949f32-d395-4bcb-b998-d2f4a7741d00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.236268] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 71ee4730-f0e5-4c71-8053-be9e73b702a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.236387] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 12a3162f-161e-4dfb-abd6-c77ce9e1785e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1390.236568] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1390.236717] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1390.304410] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76647592-6865-4a8b-9341-09c3cc97c5b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.313347] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1eccbe-dd3c-4914-9a13-1646720f20ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.359433] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d9e1e0-c55d-42ff-b15e-81e0bcedb2f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.369380] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5051f7b0-f812-414d-95cb-fc3e796941a6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.388845] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.484385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-902200b2-f2ca-4979-961a-ec046d22d05c-d313873d-4e4c-4a16-b543-ace74d422831" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.484852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-902200b2-f2ca-4979-961a-ec046d22d05c-d313873d-4e4c-4a16-b543-ace74d422831" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.485022] env[69994]: DEBUG nova.objects.instance [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'flavor' on Instance uuid 902200b2-f2ca-4979-961a-ec046d22d05c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1390.495322] env[69994]: DEBUG 
oslo_vmware.api [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242945, 'name': PowerOnVM_Task, 'duration_secs': 0.431486} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.495558] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1390.495785] env[69994]: INFO nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Took 6.68 seconds to spawn the instance on the hypervisor. [ 1390.495970] env[69994]: DEBUG nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1390.496727] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e941ed1-9e61-476c-9de5-c537a03dfdc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.892626] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1391.015025] env[69994]: INFO nova.compute.manager [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Took 11.45 seconds to build instance. 
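The entries above show the resource tracker re-reporting an unchanged inventory to Placement for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7. As a minimal sketch (not Nova code), here is how that inventory dict maps to schedulable capacity, assuming the usual Placement rule of (total - reserved) * allocation_ratio bounded by min_unit/max_unit/step_size:

```python
# Illustrative only: interpret the inventory dict reported in the log above.
# Placement's usable capacity per resource class is assumed to be
# (total - reserved) * allocation_ratio, constrained by max_unit per allocation.

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity ~{capacity:.0f} "
          f"(at most {inv['max_unit']} per single allocation)")
```

Under this reading, the 48 physical vCPUs with a 4.0 allocation ratio yield roughly 192 schedulable VCPU units, which is consistent with the "Total usable vcpus: 48, total allocated vcpus: 5" view logged earlier.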
[ 1391.120234] env[69994]: DEBUG nova.objects.instance [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'pci_requests' on Instance uuid 902200b2-f2ca-4979-961a-ec046d22d05c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1391.397736] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1391.397947] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.188s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.517496] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a309270-b14a-4396-b70b-5daa67f69b1a tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.962s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.622456] env[69994]: DEBUG nova.objects.base [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Object Instance<902200b2-f2ca-4979-961a-ec046d22d05c> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1391.622660] env[69994]: DEBUG nova.network.neutron [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.679786] env[69994]: DEBUG nova.policy [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1391.868575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1391.868772] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 
tempest-ServersTestJSON-478926025-project-member] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.868978] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1391.869173] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.869343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.871453] env[69994]: INFO nova.compute.manager [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Terminating instance [ 1392.375812] env[69994]: DEBUG nova.compute.manager [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1392.376077] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1392.376986] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639ece70-6d1c-4288-95fe-8328243c7f70 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.384470] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.384727] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8306213d-f5be-498c-94fb-bfead7226e12 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.391431] env[69994]: DEBUG oslo_vmware.api [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1392.391431] env[69994]: value = "task-3242946" [ 1392.391431] env[69994]: _type = "Task" [ 1392.391431] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.398750] env[69994]: DEBUG oslo_vmware.api [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242946, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.897687] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.898076] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.903465] env[69994]: DEBUG oslo_vmware.api [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242946, 'name': PowerOffVM_Task, 'duration_secs': 0.200794} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.904449] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1392.904449] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1392.904449] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b534be7-47ef-4fe3-8b48-e5a6a181ab60 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.964741] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1392.964982] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1392.965185] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleting the datastore file [datastore2] 12a3162f-161e-4dfb-abd6-c77ce9e1785e {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1392.965455] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-994f3909-9965-449e-8e60-55b5b299f079 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.971674] env[69994]: DEBUG oslo_vmware.api [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1392.971674] env[69994]: value = "task-3242948" [ 1392.971674] env[69994]: _type = "Task" [ 1392.971674] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.978891] env[69994]: DEBUG oslo_vmware.api [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.027190] env[69994]: DEBUG nova.compute.manager [req-28bc08aa-f49b-4ca7-83ed-9a270d5c83fc req-59013370-1532-4255-8ad9-76e7a8278074 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-vif-plugged-d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1393.027190] env[69994]: DEBUG oslo_concurrency.lockutils [req-28bc08aa-f49b-4ca7-83ed-9a270d5c83fc req-59013370-1532-4255-8ad9-76e7a8278074 service nova] Acquiring lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1393.027190] env[69994]: DEBUG oslo_concurrency.lockutils [req-28bc08aa-f49b-4ca7-83ed-9a270d5c83fc req-59013370-1532-4255-8ad9-76e7a8278074 service nova] Lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1393.027190] env[69994]: DEBUG oslo_concurrency.lockutils [req-28bc08aa-f49b-4ca7-83ed-9a270d5c83fc req-59013370-1532-4255-8ad9-76e7a8278074 service nova] Lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.027774] env[69994]: DEBUG nova.compute.manager [req-28bc08aa-f49b-4ca7-83ed-9a270d5c83fc req-59013370-1532-4255-8ad9-76e7a8278074 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] No waiting events found dispatching network-vif-plugged-d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1393.028148] env[69994]: WARNING nova.compute.manager [req-28bc08aa-f49b-4ca7-83ed-9a270d5c83fc req-59013370-1532-4255-8ad9-76e7a8278074 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received unexpected event network-vif-plugged-d313873d-4e4c-4a16-b543-ace74d422831 for instance with vm_state active and task_state None. [ 1393.106226] env[69994]: DEBUG nova.network.neutron [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Successfully updated port: d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1393.145907] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.480924] env[69994]: DEBUG oslo_vmware.api [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158515} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.481190] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1393.481365] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1393.481547] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1393.481722] env[69994]: INFO nova.compute.manager [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1393.481963] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1393.482179] env[69994]: DEBUG nova.compute.manager [-] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1393.482276] env[69994]: DEBUG nova.network.neutron [-] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1393.609084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.609290] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1393.609469] env[69994]: DEBUG nova.network.neutron [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.142962] env[69994]: WARNING nova.network.neutron [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 
tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. ignoring it [ 1394.208462] env[69994]: DEBUG nova.network.neutron [-] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.402860] env[69994]: DEBUG nova.network.neutron [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d313873d-4e4c-4a16-b543-ace74d422831", "address": "fa:16:3e:99:b1:1c", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd313873d-4e", "ovs_interfaceid": "d313873d-4e4c-4a16-b543-ace74d422831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.710671] env[69994]: INFO nova.compute.manager [-] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Took 1.23 seconds to deallocate network for instance. 
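The network info cache entry above stores a nested per-VIF structure (vif -> network -> subnets -> ips -> floating_ips). A minimal, illustrative Python sketch of reading that shape back out of the cached JSON follows; the nesting mirrors the logged payload, and any helper names are assumptions, not Nova code:

```python
# Illustrative only: summarize a cached network_info list like the one logged
# by update_instance_cache_with_nw_info above.

def summarize_network_info(network_info):
    lines = []
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                fixed.append(ip.get('address'))
                floating.extend(f.get('address') for f in ip.get('floating_ips', []))
        lines.append(f"port {vif.get('id')} mac {vif.get('address')} "
                     f"fixed={fixed} floating={floating}")
    return lines

# Trimmed example shaped like the second VIF in the cache entry above.
example = [{
    'id': 'd313873d-4e4c-4a16-b543-ace74d422831',
    'address': 'fa:16:3e:99:b1:1c',
    'network': {'subnets': [{'ips': [{'address': '192.168.128.11',
                                      'floating_ips': []}]}]},
}]
print('\n'.join(summarize_network_info(example)))
```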
[ 1394.905228] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1394.905958] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.906133] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.906971] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea59b9dc-2305-4751-a019-d56d7c7879a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.923859] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1394.924088] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1394.924249] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1394.924435] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1394.924583] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1394.924762] env[69994]: DEBUG nova.virt.hardware [None 
req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1394.924976] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1394.925152] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1394.925321] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1394.925485] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1394.925694] env[69994]: DEBUG nova.virt.hardware [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1394.932089] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1394.932403] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e6dbefa-d9d0-4884-8f27-e1a0636bd598 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.948987] env[69994]: DEBUG oslo_vmware.api [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1394.948987] env[69994]: value = "task-3242949" [ 1394.948987] env[69994]: _type = "Task" [ 1394.948987] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.956989] env[69994]: DEBUG oslo_vmware.api [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242949, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.056214] env[69994]: DEBUG nova.compute.manager [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-changed-d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1395.056413] env[69994]: DEBUG nova.compute.manager [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing instance network info cache due to event network-changed-d313873d-4e4c-4a16-b543-ace74d422831. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1395.056634] env[69994]: DEBUG oslo_concurrency.lockutils [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.056779] env[69994]: DEBUG oslo_concurrency.lockutils [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1395.056939] env[69994]: DEBUG nova.network.neutron [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing network info cache for port d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1395.217355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.217666] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.217881] env[69994]: DEBUG nova.objects.instance [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'resources' on Instance uuid 12a3162f-161e-4dfb-abd6-c77ce9e1785e {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1395.459126] env[69994]: DEBUG oslo_vmware.api [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242949, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.755154] env[69994]: DEBUG nova.network.neutron [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updated VIF entry in instance network info cache for port d313873d-4e4c-4a16-b543-ace74d422831. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1395.755591] env[69994]: DEBUG nova.network.neutron [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d313873d-4e4c-4a16-b543-ace74d422831", "address": "fa:16:3e:99:b1:1c", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd313873d-4e", "ovs_interfaceid": "d313873d-4e4c-4a16-b543-ace74d422831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.795096] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe9ff9e-4e5b-49c4-bf45-4476b1c6012e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.803143] 
env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9096d5c-f54b-45eb-90e9-4eb8498eeecc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.834839] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31097f86-7c07-4e27-a507-1271c4fa0d41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.842175] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55a58bb-df76-4591-afff-b3eaadcbaf0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.854922] env[69994]: DEBUG nova.compute.provider_tree [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.958240] env[69994]: DEBUG oslo_vmware.api [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242949, 'name': ReconfigVM_Task, 'duration_secs': 0.61154} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.958706] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1395.958919] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1396.258832] env[69994]: DEBUG oslo_concurrency.lockutils [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.259253] env[69994]: DEBUG nova.compute.manager [req-55d0b4f6-b4c9-4405-ad8c-a92c28fcfaf8 req-38e4a7b7-ed83-4647-b30a-4a4eb561d9d2 service nova] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Received event network-vif-deleted-47b2296d-18a2-4e7f-a528-9b8992bf7d1c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1396.358496] env[69994]: DEBUG nova.scheduler.client.report [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1396.463259] env[69994]: DEBUG oslo_concurrency.lockutils [None req-73cac1d8-fa19-4e89-8ef4-ed7e71269701 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-902200b2-f2ca-4979-961a-ec046d22d05c-d313873d-4e4c-4a16-b543-ace74d422831" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.978s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.863343] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.645s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.882686] env[69994]: INFO nova.scheduler.client.report [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted allocations for instance 12a3162f-161e-4dfb-abd6-c77ce9e1785e [ 1397.390880] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ba46dd6c-11db-4390-b287-66761ba3fa0e tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "12a3162f-161e-4dfb-abd6-c77ce9e1785e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.522s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.658687] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-902200b2-f2ca-4979-961a-ec046d22d05c-d313873d-4e4c-4a16-b543-ace74d422831" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1397.658990] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-902200b2-f2ca-4979-961a-ec046d22d05c-d313873d-4e4c-4a16-b543-ace74d422831" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.162437] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.162600] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.163608] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afe512d-2388-4f69-9eeb-a935d25cffed {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.181011] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90001e1-66ad-4505-aaf5-923a050fbec7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.207562] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1398.207562] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93464ac2-2166-4ae5-9725-a483ac20af2a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.226212] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1398.226212] env[69994]: value = "task-3242950" [ 1398.226212] env[69994]: _type = "Task" [ 1398.226212] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.233922] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.735848] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.827119] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "a988046b-69c5-410b-8126-398e3a1c5960" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.827355] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1399.236528] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.330281] env[69994]: DEBUG nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Starting instance... {{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1399.736870] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.856032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1399.856032] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1399.857074] env[69994]: INFO nova.compute.claims [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1400.237811] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.737972] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.940225] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af28cac-7474-417e-969e-5ff23545d25e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.948063] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52a82f9-f2bf-481f-8017-645976bea8b0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.978206] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8d41d5-f447-496e-8740-84a0299c3779 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.985700] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5911d5c7-93d0-4250-b361-8a3575c7dcc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.000699] env[69994]: DEBUG nova.compute.provider_tree [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.238573] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.504479] env[69994]: DEBUG nova.scheduler.client.report [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1401.739071] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.009715] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.154s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1402.010272] env[69994]: DEBUG nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1402.239715] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.515165] env[69994]: DEBUG nova.compute.utils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1402.516625] env[69994]: DEBUG nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1402.516734] env[69994]: DEBUG nova.network.neutron [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1402.562175] env[69994]: DEBUG nova.policy [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '432370da6f1840db8f93b613ca52e31d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42ee300d6f33459da1deb82b1b14cf74', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1402.740848] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.786658] env[69994]: DEBUG nova.network.neutron [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Successfully created port: b4de7263-27a7-4c62-8157-dd3f4bc10582 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1403.019894] env[69994]: DEBUG nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1403.241543] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.742167] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.029432] env[69994]: DEBUG nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1404.055920] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1404.056195] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1404.056359] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1404.056542] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1404.056761] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1404.056939] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1404.057168] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1404.057330] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1404.057499] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 
tempest-ServersTestJSON-478926025-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1404.057661] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1404.057835] env[69994]: DEBUG nova.virt.hardware [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1404.058709] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3dd5ac-5caf-4501-b95b-b70bc40ab49e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.066662] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8671642-0d0f-42d5-bebf-5c0b3af65303 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.129675] env[69994]: DEBUG nova.compute.manager [req-6c628c4c-94e8-46f8-90a3-ea314517af7b req-b21068b1-df06-40d0-b145-63b3cdd7cd0d service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Received event network-vif-plugged-b4de7263-27a7-4c62-8157-dd3f4bc10582 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1404.129890] env[69994]: DEBUG oslo_concurrency.lockutils [req-6c628c4c-94e8-46f8-90a3-ea314517af7b req-b21068b1-df06-40d0-b145-63b3cdd7cd0d service nova] Acquiring lock "a988046b-69c5-410b-8126-398e3a1c5960-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.130108] env[69994]: DEBUG oslo_concurrency.lockutils [req-6c628c4c-94e8-46f8-90a3-ea314517af7b req-b21068b1-df06-40d0-b145-63b3cdd7cd0d service nova] Lock "a988046b-69c5-410b-8126-398e3a1c5960-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.130279] env[69994]: DEBUG oslo_concurrency.lockutils [req-6c628c4c-94e8-46f8-90a3-ea314517af7b req-b21068b1-df06-40d0-b145-63b3cdd7cd0d service nova] Lock "a988046b-69c5-410b-8126-398e3a1c5960-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.130444] env[69994]: DEBUG nova.compute.manager [req-6c628c4c-94e8-46f8-90a3-ea314517af7b req-b21068b1-df06-40d0-b145-63b3cdd7cd0d service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] No waiting events found dispatching network-vif-plugged-b4de7263-27a7-4c62-8157-dd3f4bc10582 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1404.130605] env[69994]: WARNING nova.compute.manager [req-6c628c4c-94e8-46f8-90a3-ea314517af7b req-b21068b1-df06-40d0-b145-63b3cdd7cd0d service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Received 
unexpected event network-vif-plugged-b4de7263-27a7-4c62-8157-dd3f4bc10582 for instance with vm_state building and task_state spawning. [ 1404.203800] env[69994]: DEBUG nova.network.neutron [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Successfully updated port: b4de7263-27a7-4c62-8157-dd3f4bc10582 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1404.242627] env[69994]: DEBUG oslo_vmware.api [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242950, 'name': ReconfigVM_Task, 'duration_secs': 5.734019} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.242751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1404.242894] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1404.710121] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "refresh_cache-a988046b-69c5-410b-8126-398e3a1c5960" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.710273] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "refresh_cache-a988046b-69c5-410b-8126-398e3a1c5960" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.710487] env[69994]: DEBUG nova.network.neutron [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.242115] env[69994]: DEBUG nova.network.neutron [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1405.352830] env[69994]: DEBUG nova.network.neutron [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Updating instance_info_cache with network_info: [{"id": "b4de7263-27a7-4c62-8157-dd3f4bc10582", "address": "fa:16:3e:68:ce:ae", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4de7263-27", "ovs_interfaceid": "b4de7263-27a7-4c62-8157-dd3f4bc10582", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.512988] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.513158] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1405.513333] env[69994]: DEBUG nova.network.neutron [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.855154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "refresh_cache-a988046b-69c5-410b-8126-398e3a1c5960" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.855459] env[69994]: DEBUG nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Instance network_info: |[{"id": "b4de7263-27a7-4c62-8157-dd3f4bc10582", "address": "fa:16:3e:68:ce:ae", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": 
"br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4de7263-27", "ovs_interfaceid": "b4de7263-27a7-4c62-8157-dd3f4bc10582", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1405.855897] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:ce:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089ef678-58b4-4bf0-a39d-b94b2d364291', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4de7263-27a7-4c62-8157-dd3f4bc10582', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1405.863373] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1405.863574] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1405.863790] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6cf5867-ac68-4fe7-af15-78b83ff3bea3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.883727] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1405.883727] env[69994]: value = "task-3242951" [ 1405.883727] env[69994]: _type = "Task" [ 1405.883727] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.893901] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242951, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.159104] env[69994]: DEBUG nova.compute.manager [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Received event network-changed-b4de7263-27a7-4c62-8157-dd3f4bc10582 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1406.159361] env[69994]: DEBUG nova.compute.manager [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Refreshing instance network info cache due to event network-changed-b4de7263-27a7-4c62-8157-dd3f4bc10582. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1406.159726] env[69994]: DEBUG oslo_concurrency.lockutils [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] Acquiring lock "refresh_cache-a988046b-69c5-410b-8126-398e3a1c5960" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.159726] env[69994]: DEBUG oslo_concurrency.lockutils [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] Acquired lock "refresh_cache-a988046b-69c5-410b-8126-398e3a1c5960" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1406.159899] env[69994]: DEBUG nova.network.neutron [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Refreshing network info cache for port b4de7263-27a7-4c62-8157-dd3f4bc10582 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1406.327034] env[69994]: INFO nova.network.neutron [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Port d313873d-4e4c-4a16-b543-ace74d422831 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1406.327414] env[69994]: DEBUG nova.network.neutron [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.393649] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242951, 'name': CreateVM_Task, 'duration_secs': 0.324418} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.393841] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1406.394455] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.394621] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1406.394971] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1406.395219] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d91f1f0-0b5e-4158-81f3-b3cf072a2726 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.399429] env[69994]: DEBUG oslo_vmware.api 
[None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1406.399429] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52dd8503-14bf-2564-61e0-cfdf8809be46" [ 1406.399429] env[69994]: _type = "Task" [ 1406.399429] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.406652] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dd8503-14bf-2564-61e0-cfdf8809be46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.831263] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1406.855745] env[69994]: DEBUG nova.network.neutron [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Updated VIF entry in instance network info cache for port b4de7263-27a7-4c62-8157-dd3f4bc10582. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1406.856173] env[69994]: DEBUG nova.network.neutron [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Updating instance_info_cache with network_info: [{"id": "b4de7263-27a7-4c62-8157-dd3f4bc10582", "address": "fa:16:3e:68:ce:ae", "network": {"id": "d0928b0d-e338-4b93-b259-f37cac9dde85", "bridge": "br-int", "label": "tempest-ServersTestJSON-727748163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "42ee300d6f33459da1deb82b1b14cf74", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089ef678-58b4-4bf0-a39d-b94b2d364291", "external-id": "nsx-vlan-transportzone-675", "segmentation_id": 675, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4de7263-27", "ovs_interfaceid": "b4de7263-27a7-4c62-8157-dd3f4bc10582", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.911010] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52dd8503-14bf-2564-61e0-cfdf8809be46, 'name': SearchDatastore_Task, 'duration_secs': 0.009051} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.911330] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1406.911569] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1406.911804] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.911956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1406.912150] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1406.912421] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e25524ad-5e9b-4b17-b9ed-e1f9ba2840bf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.921079] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1406.921265] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1406.921997] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ec22f49-d2e0-466c-a9fd-d283ac7f0161 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.927484] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1406.927484] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521d2e20-f650-f008-d972-558c8ff3cf8d" [ 1406.927484] env[69994]: _type = "Task" [ 1406.927484] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.936057] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521d2e20-f650-f008-d972-558c8ff3cf8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.027376] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-4c949f32-d395-4bcb-b998-d2f4a7741d00-d313873d-4e4c-4a16-b543-ace74d422831" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.027685] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-4c949f32-d395-4bcb-b998-d2f4a7741d00-d313873d-4e4c-4a16-b543-ace74d422831" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.027988] env[69994]: DEBUG nova.objects.instance [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'flavor' on Instance uuid 4c949f32-d395-4bcb-b998-d2f4a7741d00 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1407.335242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-af8096d0-1363-421b-9fb8-bbddc6fdf0e1 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-902200b2-f2ca-4979-961a-ec046d22d05c-d313873d-4e4c-4a16-b543-ace74d422831" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.676s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.359111] env[69994]: DEBUG oslo_concurrency.lockutils [req-02f1a24e-d47d-42b7-b7b1-522befd3be14 req-15f39b80-be4c-4a27-9c45-0b7d66f30c5a service nova] Releasing lock "refresh_cache-a988046b-69c5-410b-8126-398e3a1c5960" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1407.440581] env[69994]: DEBUG 
oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521d2e20-f650-f008-d972-558c8ff3cf8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009448} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.441753] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec90700b-6598-450c-97c7-df79dd32c592 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.448847] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1407.448847] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52176c51-0f29-42ea-427a-351ba66f21c5" [ 1407.448847] env[69994]: _type = "Task" [ 1407.448847] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.458714] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52176c51-0f29-42ea-427a-351ba66f21c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.618962] env[69994]: DEBUG nova.objects.instance [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'pci_requests' on Instance uuid 4c949f32-d395-4bcb-b998-d2f4a7741d00 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1407.960076] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52176c51-0f29-42ea-427a-351ba66f21c5, 'name': SearchDatastore_Task, 'duration_secs': 0.015673} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.960309] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1407.960568] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] a988046b-69c5-410b-8126-398e3a1c5960/a988046b-69c5-410b-8126-398e3a1c5960.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.960832] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17eafa4b-223f-46e5-a21e-4b9cf61ad11e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.967838] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1407.967838] env[69994]: value = "task-3242952" [ 1407.967838] env[69994]: _type = "Task" [ 1407.967838] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.976098] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242952, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.122149] env[69994]: DEBUG nova.objects.base [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Object Instance<4c949f32-d395-4bcb-b998-d2f4a7741d00> lazy-loaded attributes: flavor,pci_requests {{(pid=69994) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1408.122375] env[69994]: DEBUG nova.network.neutron [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1408.194173] env[69994]: DEBUG nova.policy [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa110088642d455baaf060b5c9daaf5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '70d05b502dfd4c5282872339c1e34d0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1408.203969] env[69994]: DEBUG nova.compute.manager [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1408.204189] env[69994]: DEBUG nova.compute.manager [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing instance network info cache due to event network-changed-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1408.204409] env[69994]: DEBUG oslo_concurrency.lockutils [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] Acquiring lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.204553] env[69994]: DEBUG oslo_concurrency.lockutils [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] Acquired lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1408.204707] env[69994]: DEBUG nova.network.neutron [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Refreshing network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1408.477534] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492334} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.477887] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] a988046b-69c5-410b-8126-398e3a1c5960/a988046b-69c5-410b-8126-398e3a1c5960.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1408.478045] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1408.478257] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-222fd82e-5132-4cdb-ac24-b469dfa3f735 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.484078] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1408.484078] env[69994]: value = "task-3242953" [ 1408.484078] env[69994]: _type = "Task" [ 1408.484078] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.491143] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242953, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.881036] env[69994]: DEBUG nova.network.neutron [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updated VIF entry in instance network info cache for port 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1408.881417] env[69994]: DEBUG nova.network.neutron [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [{"id": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "address": "fa:16:3e:dc:3f:63", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cf0f0b5-84", "ovs_interfaceid": "3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.993876] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061561} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.994111] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1408.994856] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58cff34-6bf4-4dfb-a66e-0a5557716f90 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.016057] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] a988046b-69c5-410b-8126-398e3a1c5960/a988046b-69c5-410b-8126-398e3a1c5960.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.016279] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdc7e477-31d2-4b21-8cce-e62f8881585e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.034935] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1409.034935] env[69994]: value = "task-3242954" [ 1409.034935] env[69994]: _type = "Task" [ 1409.034935] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.042386] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242954, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.384234] env[69994]: DEBUG oslo_concurrency.lockutils [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] Releasing lock "refresh_cache-902200b2-f2ca-4979-961a-ec046d22d05c" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1409.384465] env[69994]: DEBUG nova.compute.manager [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1409.384621] env[69994]: DEBUG nova.compute.manager [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing instance network info cache due to event network-changed-18df8f55-9b6c-4093-a622-a8129ca51490. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1409.384899] env[69994]: DEBUG oslo_concurrency.lockutils [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.385065] env[69994]: DEBUG oslo_concurrency.lockutils [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1409.385239] env[69994]: DEBUG nova.network.neutron [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing network info cache for port 18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.545692] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242954, 'name': ReconfigVM_Task, 'duration_secs': 0.273094} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.546059] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Reconfigured VM instance instance-0000007b to attach disk [datastore2] a988046b-69c5-410b-8126-398e3a1c5960/a988046b-69c5-410b-8126-398e3a1c5960.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.546837] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94fee9a0-7318-4055-9162-32797ddc9363 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.553241] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1409.553241] env[69994]: value = "task-3242955" [ 1409.553241] env[69994]: _type = "Task" [ 1409.553241] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.561076] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242955, 'name': Rename_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.873213] env[69994]: DEBUG nova.network.neutron [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Successfully updated port: d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.063304] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242955, 'name': Rename_Task, 'duration_secs': 0.14085} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.063589] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1410.063837] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a07e375-6b21-412d-9347-f021f08f2cc7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.069975] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1410.069975] env[69994]: value = "task-3242956" [ 1410.069975] env[69994]: _type = "Task" [ 1410.069975] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.077058] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.078149] env[69994]: DEBUG nova.network.neutron [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updated VIF entry in instance network info cache for port 18df8f55-9b6c-4093-a622-a8129ca51490. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.078496] env[69994]: DEBUG nova.network.neutron [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.234492] env[69994]: DEBUG nova.compute.manager [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-vif-plugged-d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1410.234746] env[69994]: DEBUG oslo_concurrency.lockutils [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] Acquiring lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.235009] env[69994]: DEBUG oslo_concurrency.lockutils [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.235232] env[69994]: DEBUG oslo_concurrency.lockutils [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.235440] env[69994]: DEBUG nova.compute.manager [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] No waiting events found dispatching 
network-vif-plugged-d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1410.235643] env[69994]: WARNING nova.compute.manager [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received unexpected event network-vif-plugged-d313873d-4e4c-4a16-b543-ace74d422831 for instance with vm_state active and task_state None. [ 1410.235840] env[69994]: DEBUG nova.compute.manager [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-changed-d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1410.236058] env[69994]: DEBUG nova.compute.manager [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing instance network info cache due to event network-changed-d313873d-4e4c-4a16-b543-ace74d422831. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1410.236274] env[69994]: DEBUG oslo_concurrency.lockutils [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.375856] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.580151] env[69994]: DEBUG oslo_vmware.api [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242956, 'name': PowerOnVM_Task, 'duration_secs': 0.420206} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.580632] env[69994]: DEBUG oslo_concurrency.lockutils [req-9936f34d-0535-491b-a236-f7e00b57e6e4 req-48a16bd3-0680-4d6a-b103-9d280b7e2265 service nova] Releasing lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1410.581036] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1410.581236] env[69994]: INFO nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Took 6.55 seconds to spawn the instance on the hypervisor. 
[ 1410.581414] env[69994]: DEBUG nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1410.581705] env[69994]: DEBUG oslo_concurrency.lockutils [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1410.581876] env[69994]: DEBUG nova.network.neutron [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Refreshing network info cache for port d313873d-4e4c-4a16-b543-ace74d422831 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.583487] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628e75f9-3965-4ef5-b015-17f5b1456989 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.100811] env[69994]: INFO nova.compute.manager [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Took 11.27 seconds to build instance. [ 1411.267471] env[69994]: DEBUG nova.network.neutron [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Added VIF to instance network info cache for port d313873d-4e4c-4a16-b543-ace74d422831. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1411.267872] env[69994]: DEBUG nova.network.neutron [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d313873d-4e4c-4a16-b543-ace74d422831", "address": "fa:16:3e:99:b1:1c", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd313873d-4e", "ovs_interfaceid": "d313873d-4e4c-4a16-b543-ace74d422831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.603264] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ea8d797b-ecc0-4f05-a81a-bcdd0b43b7ae tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.776s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.770879] env[69994]: DEBUG oslo_concurrency.lockutils [req-ddac2d9e-917b-4ff8-8385-3ad1ddaf829f req-c84af945-9f0a-4785-af49-64f4f9913a0b service nova] Releasing lock 
"refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1411.771327] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1411.771510] env[69994]: DEBUG nova.network.neutron [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.096482] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "a988046b-69c5-410b-8126-398e3a1c5960" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1412.096751] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.096938] env[69994]: DEBUG nova.compute.manager [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1412.097869] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86610e05-0d9c-43b5-96ce-984e5eb3daca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.105079] env[69994]: DEBUG nova.compute.manager [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69994) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1412.105615] env[69994]: DEBUG nova.objects.instance [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'flavor' on Instance uuid a988046b-69c5-410b-8126-398e3a1c5960 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1412.305192] env[69994]: WARNING nova.network.neutron [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. 
ignoring it [ 1412.305406] env[69994]: WARNING nova.network.neutron [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] d06bf010-e10f-41e7-ad34-421ab075833c already exists in list: networks containing: ['d06bf010-e10f-41e7-ad34-421ab075833c']. ignoring it [ 1412.305574] env[69994]: WARNING nova.network.neutron [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] d313873d-4e4c-4a16-b543-ace74d422831 already exists in list: port_ids containing: ['d313873d-4e4c-4a16-b543-ace74d422831']. ignoring it [ 1412.554088] env[69994]: DEBUG nova.network.neutron [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d313873d-4e4c-4a16-b543-ace74d422831", "address": "fa:16:3e:99:b1:1c", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd313873d-4e", "ovs_interfaceid": "d313873d-4e4c-4a16-b543-ace74d422831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69994) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.057646] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1413.058336] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.058501] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1413.059367] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cd593f-69cc-4a80-a2bb-0be4c4b8791b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.077169] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1413.077393] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1413.077549] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1413.077733] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1413.077878] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1413.078039] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1413.078251] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1413.078410] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1413.078574] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1413.078735] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1413.078907] env[69994]: DEBUG nova.virt.hardware [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1413.085184] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Reconfiguring VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1413.085493] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb7d34c3-20b2-4adf-bed2-5da5e70280a3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.102809] env[69994]: DEBUG oslo_vmware.api [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1413.102809] env[69994]: value = "task-3242957" [ 1413.102809] env[69994]: _type = "Task" [ 1413.102809] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.110543] env[69994]: DEBUG oslo_vmware.api [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242957, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.112106] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1413.112353] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68d51d5f-597b-4305-b139-d47357c25ed6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.117207] env[69994]: DEBUG oslo_vmware.api [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1413.117207] env[69994]: value = "task-3242958" [ 1413.117207] env[69994]: _type = "Task" [ 1413.117207] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.124981] env[69994]: DEBUG oslo_vmware.api [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.612614] env[69994]: DEBUG oslo_vmware.api [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242957, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.625793] env[69994]: DEBUG oslo_vmware.api [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242958, 'name': PowerOffVM_Task, 'duration_secs': 0.186883} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.626083] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.626283] env[69994]: DEBUG nova.compute.manager [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1413.627008] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f35f63f-c0b5-4a1c-8e55-a92cf8c93017 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.113108] env[69994]: DEBUG oslo_vmware.api [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242957, 'name': ReconfigVM_Task, 'duration_secs': 0.608721} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.113602] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1414.113812] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Reconfigured VM to attach interface {{(pid=69994) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1414.137693] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b2b0398b-c03c-425c-8bbd-20d506005158 tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.041s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.618540] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c3d62320-8ad6-408a-b019-9c5fe157a0ed tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-4c949f32-d395-4bcb-b998-d2f4a7741d00-d313873d-4e4c-4a16-b543-ace74d422831" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.591s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.652682] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "a988046b-69c5-410b-8126-398e3a1c5960" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.653012] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.653246] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "a988046b-69c5-410b-8126-398e3a1c5960-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.653435] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.653601] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.655558] env[69994]: INFO nova.compute.manager [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Terminating instance [ 1415.158676] env[69994]: DEBUG nova.compute.manager [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1415.159138] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1415.159817] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72b0674-74e5-42cd-a367-70033055d76a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.167400] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1415.167622] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2ea7472-3bf2-49af-a645-2e8bfc0d76e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.226594] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1415.226814] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1415.226986] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleting the datastore file [datastore2] a988046b-69c5-410b-8126-398e3a1c5960 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1415.227272] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-576e3533-f996-4123-a5df-1bdcf29d74f3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.233525] env[69994]: DEBUG oslo_vmware.api [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1415.233525] env[69994]: value = "task-3242960" [ 1415.233525] env[69994]: _type = "Task" [ 1415.233525] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.240604] env[69994]: DEBUG oslo_vmware.api [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242960, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.744521] env[69994]: DEBUG oslo_vmware.api [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124267} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.744723] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1415.744931] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1415.745127] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1415.745304] env[69994]: INFO nova.compute.manager [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1415.745569] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1415.745754] env[69994]: DEBUG nova.compute.manager [-] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1415.745850] env[69994]: DEBUG nova.network.neutron [-] [instance: a988046b-69c5-410b-8126-398e3a1c5960] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1415.957396] env[69994]: DEBUG nova.compute.manager [req-87572ac5-1c02-402c-b8e0-27e6f62607b9 req-45f51299-ef26-4241-ae52-6618410415a4 service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Received event network-vif-deleted-b4de7263-27a7-4c62-8157-dd3f4bc10582 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1415.957539] env[69994]: INFO nova.compute.manager [req-87572ac5-1c02-402c-b8e0-27e6f62607b9 req-45f51299-ef26-4241-ae52-6618410415a4 service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Neutron deleted interface b4de7263-27a7-4c62-8157-dd3f4bc10582; detaching it from the instance and deleting it from the info cache [ 1415.957580] env[69994]: DEBUG nova.network.neutron [req-87572ac5-1c02-402c-b8e0-27e6f62607b9 req-45f51299-ef26-4241-ae52-6618410415a4 service nova] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.211650] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "interface-4c949f32-d395-4bcb-b998-d2f4a7741d00-d313873d-4e4c-4a16-b543-ace74d422831" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.212108] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-4c949f32-d395-4bcb-b998-d2f4a7741d00-d313873d-4e4c-4a16-b543-ace74d422831" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1416.442055] env[69994]: DEBUG nova.network.neutron [-] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.460715] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a031bcf0-2ce6-4f25-a6f2-06ba2ef03f56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.470749] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c075fc-fbbf-44db-827b-19b3933f8cc6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.496500] env[69994]: DEBUG nova.compute.manager [req-87572ac5-1c02-402c-b8e0-27e6f62607b9 req-45f51299-ef26-4241-ae52-6618410415a4 service nova] [instance: 
a988046b-69c5-410b-8126-398e3a1c5960] Detach interface failed, port_id=b4de7263-27a7-4c62-8157-dd3f4bc10582, reason: Instance a988046b-69c5-410b-8126-398e3a1c5960 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1416.715111] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.715303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.716107] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f257464d-a65b-4815-b83d-e290d58eedaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.733721] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4016df92-3454-4cef-bede-e883bc2ccc40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.759770] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Reconfiguring VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1416.760144] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98f75cc8-04c8-42db-add0-d47e843452e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.779561] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1416.779561] env[69994]: value = "task-3242961" [ 1416.779561] env[69994]: _type = "Task" [ 1416.779561] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.787114] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.944975] env[69994]: INFO nova.compute.manager [-] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Took 1.20 seconds to deallocate network for instance. [ 1417.289196] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.451144] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1417.451461] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1417.451681] env[69994]: DEBUG nova.objects.instance [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'resources' on Instance uuid a988046b-69c5-410b-8126-398e3a1c5960 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1417.789955] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.024645] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7704a2d-b27e-4734-955a-fea84e3e8667 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.031780] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16406586-86c0-440e-92ed-ffd8d8120b4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.061083] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0663e157-d2ad-45d1-bf79-66be9069cd59 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.067625] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa173023-a929-4caf-9abe-653ffc2cddcb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.080266] env[69994]: DEBUG nova.compute.provider_tree [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1418.291763] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.583503] env[69994]: DEBUG nova.scheduler.client.report [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1418.794402] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.088689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.637s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1419.107453] env[69994]: INFO nova.scheduler.client.report [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted allocations for instance a988046b-69c5-410b-8126-398e3a1c5960 [ 1419.183718] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1419.183989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1419.184398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1419.184398] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1419.184617] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1419.186546] env[69994]: INFO nova.compute.manager [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Terminating instance [ 1419.291361] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.614807] env[69994]: DEBUG oslo_concurrency.lockutils [None req-2151b241-1fd1-42d8-a09f-4c45d293764f tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "a988046b-69c5-410b-8126-398e3a1c5960" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.962s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1419.689681] env[69994]: DEBUG nova.compute.manager [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1419.689921] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1419.690849] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f1a167-eec6-43a5-8153-277e6164e8f8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.699055] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.699301] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01058317-0284-40c6-a493-6204318c79bc {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.706408] env[69994]: DEBUG oslo_vmware.api [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1419.706408] env[69994]: value = "task-3242962" [ 1419.706408] env[69994]: _type = "Task" [ 1419.706408] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.714417] env[69994]: DEBUG oslo_vmware.api [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242962, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.792486] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.215767] env[69994]: DEBUG oslo_vmware.api [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242962, 'name': PowerOffVM_Task, 'duration_secs': 0.16209} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.216045] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.216237] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1420.216478] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e78da5c-75ff-41e7-84b4-9f8429318e41 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.275627] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1420.275827] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1420.275956] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleting the datastore file [datastore2] 71ee4730-f0e5-4c71-8053-be9e73b702a4 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1420.276269] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2ac5054-7c60-4706-b2c4-373b714ce8bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.282589] env[69994]: DEBUG oslo_vmware.api [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for the task: (returnval){ [ 1420.282589] env[69994]: value = "task-3242964" [ 1420.282589] env[69994]: _type = "Task" [ 1420.282589] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.294143] env[69994]: DEBUG oslo_vmware.api [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242964, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.297244] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.413075] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1420.413359] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1420.413575] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1420.413758] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1420.413927] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.416122] env[69994]: INFO nova.compute.manager [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Terminating instance [ 1420.795361] env[69994]: DEBUG oslo_vmware.api [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Task: {'id': task-3242964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114453} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.798441] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1420.798635] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1420.798809] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1420.798981] env[69994]: INFO nova.compute.manager [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1420.799237] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1420.799434] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.799631] env[69994]: DEBUG nova.compute.manager [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1420.799723] env[69994]: DEBUG nova.network.neutron [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1420.920500] env[69994]: DEBUG nova.compute.manager [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1420.920500] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1420.921388] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3152d266-b209-4f27-87a8-032cee92c245 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.930531] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.930635] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad95ca7c-cba0-4ec5-9ea4-12afc032e0eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.936934] env[69994]: DEBUG oslo_vmware.api [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1420.936934] env[69994]: value = "task-3242965" [ 1420.936934] env[69994]: _type = "Task" [ 1420.936934] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.946871] env[69994]: DEBUG oslo_vmware.api [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242965, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.293799] env[69994]: DEBUG nova.compute.manager [req-3924f203-181b-451e-b2f4-469c9e1157c0 req-79a05116-490d-45cd-8661-8f005a548426 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Received event network-vif-deleted-06234607-a0e8-40a9-8a07-6f4502407064 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1421.293997] env[69994]: INFO nova.compute.manager [req-3924f203-181b-451e-b2f4-469c9e1157c0 req-79a05116-490d-45cd-8661-8f005a548426 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Neutron deleted interface 06234607-a0e8-40a9-8a07-6f4502407064; detaching it from the instance and deleting it from the info cache [ 1421.294185] env[69994]: DEBUG nova.network.neutron [req-3924f203-181b-451e-b2f4-469c9e1157c0 req-79a05116-490d-45cd-8661-8f005a548426 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.301208] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.446512] env[69994]: DEBUG oslo_vmware.api [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242965, 'name': PowerOffVM_Task, 'duration_secs': 0.184053} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.446770] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1421.446939] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1421.447270] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b09c13ff-ef3a-47e6-b538-ee5c4bb45820 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.513180] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1421.513427] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1421.513614] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleting the datastore file [datastore1] 73288b0c-7e85-48cd-9ea1-d08a31a81c32 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1421.513882] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0a420c2-9359-4b7d-8c05-0ddafcca525f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.519757] env[69994]: DEBUG oslo_vmware.api [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for the task: (returnval){ [ 1421.519757] env[69994]: value = "task-3242967" [ 1421.519757] env[69994]: _type = "Task" [ 1421.519757] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.527246] env[69994]: DEBUG oslo_vmware.api [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242967, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.770995] env[69994]: DEBUG nova.network.neutron [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.798969] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.799238] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22b6ad66-f77c-4459-9ef7-24bbb47fb03c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.807517] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d97689-467e-4c73-9d2d-abcff4101730 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.833909] env[69994]: DEBUG nova.compute.manager [req-3924f203-181b-451e-b2f4-469c9e1157c0 req-79a05116-490d-45cd-8661-8f005a548426 service nova] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Detach interface failed, port_id=06234607-a0e8-40a9-8a07-6f4502407064, reason: Instance 71ee4730-f0e5-4c71-8053-be9e73b702a4 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1422.029629] env[69994]: DEBUG oslo_vmware.api [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Task: {'id': task-3242967, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158532} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.029878] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1422.030072] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1422.030254] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1422.030428] env[69994]: INFO nova.compute.manager [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1422.030662] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1422.030847] env[69994]: DEBUG nova.compute.manager [-] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1422.030952] env[69994]: DEBUG nova.network.neutron [-] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1422.274335] env[69994]: INFO nova.compute.manager [-] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Took 1.47 seconds to deallocate network for instance. [ 1422.299279] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.780717] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1422.781037] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1422.781278] env[69994]: DEBUG nova.objects.instance [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lazy-loading 'resources' on Instance uuid 71ee4730-f0e5-4c71-8053-be9e73b702a4 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1422.782328] env[69994]: DEBUG nova.network.neutron [-] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.799363] env[69994]: DEBUG oslo_vmware.api [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242961, 'name': ReconfigVM_Task, 'duration_secs': 5.728141} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.799629] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1422.799802] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Reconfigured VM to detach interface {{(pid=69994) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1423.285605] env[69994]: INFO nova.compute.manager [-] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Took 1.25 seconds to deallocate network for instance. [ 1423.323318] env[69994]: DEBUG nova.compute.manager [req-d58b792b-7e4a-490c-8423-b83df604b252 req-8dc88cdf-c85a-488c-9349-466a7c386c14 service nova] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Received event network-vif-deleted-895c33b4-795e-44d8-b1c8-fa7abde175c8 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1423.351559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95a5546-da26-4ec8-a527-d06de54afb55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.358693] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da999ad-79d5-4c7a-81c1-637cd807255b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.388615] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0971027c-bfd1-4b4a-b907-13756c2368ae {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.395265] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1304e515-a28b-4c09-ae3a-68528e03c867 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.409463] env[69994]: DEBUG nova.compute.provider_tree [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1423.795024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1423.912456] env[69994]: DEBUG nova.scheduler.client.report [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1424.067762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.067941] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquired lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1424.068160] env[69994]: DEBUG nova.network.neutron [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.417305] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.636s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1424.419652] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.625s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1424.419938] env[69994]: DEBUG nova.objects.instance [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lazy-loading 'resources' on Instance uuid 73288b0c-7e85-48cd-9ea1-d08a31a81c32 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1424.443643] env[69994]: INFO nova.scheduler.client.report [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Deleted allocations for instance 71ee4730-f0e5-4c71-8053-be9e73b702a4 [ 1424.639684] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1424.639938] env[69994]: DEBUG 
oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1424.640594] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1424.640796] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1424.640968] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1424.644779] env[69994]: INFO nova.compute.manager [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Terminating instance [ 1424.779114] env[69994]: INFO nova.network.neutron [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Port d313873d-4e4c-4a16-b543-ace74d422831 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1424.779490] env[69994]: DEBUG nova.network.neutron [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [{"id": "18df8f55-9b6c-4093-a622-a8129ca51490", "address": "fa:16:3e:76:fe:e5", "network": {"id": "d06bf010-e10f-41e7-ad34-421ab075833c", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-140469634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "70d05b502dfd4c5282872339c1e34d0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18df8f55-9b", "ovs_interfaceid": "18df8f55-9b6c-4093-a622-a8129ca51490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.951823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1f2f5f9b-f3a3-4173-971a-4b8ac3e46bfe tempest-AttachVolumeShelveTestJSON-1772287420 tempest-AttachVolumeShelveTestJSON-1772287420-project-member] Lock "71ee4730-f0e5-4c71-8053-be9e73b702a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.768s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1424.977973] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3f957e-9a85-4466-a28c-e57e9cd83db9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.985166] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499847a3-9f6b-4376-9d4a-1445a9940572 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.015981] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194e0a58-cf4b-4dc2-a158-2b06cc08b64a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.023045] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35493c5-6bf4-4aaf-826c-ce63593b199f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.036154] env[69994]: DEBUG nova.compute.provider_tree [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.148759] env[69994]: DEBUG nova.compute.manager [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1425.149111] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1425.149969] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8177c56-a60b-46ca-996f-ea397f5dd499 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.157362] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1425.157592] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e56d9a9-cadc-4ef3-8d97-c0d70bc749e5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.163393] env[69994]: DEBUG oslo_vmware.api [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1425.163393] env[69994]: value = "task-3242968" [ 1425.163393] env[69994]: _type = "Task" [ 1425.163393] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.170989] env[69994]: DEBUG oslo_vmware.api [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242968, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.282361] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Releasing lock "refresh_cache-4c949f32-d395-4bcb-b998-d2f4a7741d00" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.539114] env[69994]: DEBUG nova.scheduler.client.report [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1425.673154] env[69994]: DEBUG oslo_vmware.api [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242968, 'name': PowerOffVM_Task, 'duration_secs': 0.178662} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.673443] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.673615] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1425.673857] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b01d585-c89e-4c45-bd61-91f505bc699e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.734512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.734742] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.734982] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 
tempest-AttachInterfacesTestJSON-859920981-project-member] Deleting the datastore file [datastore1] 4c949f32-d395-4bcb-b998-d2f4a7741d00 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.735294] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-889c1278-8880-4107-b9aa-650d602a8fc2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.741650] env[69994]: DEBUG oslo_vmware.api [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1425.741650] env[69994]: value = "task-3242970" [ 1425.741650] env[69994]: _type = "Task" [ 1425.741650] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.749364] env[69994]: DEBUG oslo_vmware.api [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.786466] env[69994]: DEBUG oslo_concurrency.lockutils [None req-7a463a54-f2f9-4e2f-8420-5ce2538b9148 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "interface-4c949f32-d395-4bcb-b998-d2f4a7741d00-d313873d-4e4c-4a16-b543-ace74d422831" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.574s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.044431] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.066188] env[69994]: INFO nova.scheduler.client.report [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Deleted allocations for instance 73288b0c-7e85-48cd-9ea1-d08a31a81c32 [ 1426.253187] env[69994]: DEBUG oslo_vmware.api [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142003} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.253484] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.253648] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1426.253820] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1426.253990] env[69994]: INFO nova.compute.manager [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1426.254246] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1426.254453] env[69994]: DEBUG nova.compute.manager [-] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1426.254524] env[69994]: DEBUG nova.network.neutron [-] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1426.573964] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9a5e1e4a-cd4d-4532-9246-bfc7fb8e30db tempest-ServersTestJSON-478926025 tempest-ServersTestJSON-478926025-project-member] Lock "73288b0c-7e85-48cd-9ea1-d08a31a81c32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.160s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.909320] env[69994]: DEBUG nova.compute.manager [req-69abb500-25a4-4550-b260-5913a601f1ee req-dfff213a-d099-4188-beb7-d308c892159a service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Received event network-vif-deleted-18df8f55-9b6c-4093-a622-a8129ca51490 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1426.909521] env[69994]: INFO nova.compute.manager [req-69abb500-25a4-4550-b260-5913a601f1ee req-dfff213a-d099-4188-beb7-d308c892159a service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Neutron deleted interface 18df8f55-9b6c-4093-a622-a8129ca51490; detaching it from the instance and deleting it from the info cache [ 1426.909653] env[69994]: DEBUG nova.network.neutron [req-69abb500-25a4-4550-b260-5913a601f1ee req-dfff213a-d099-4188-beb7-d308c892159a service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.386174] env[69994]: DEBUG nova.network.neutron [-] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.415413] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-568e4f95-9d72-48fb-a297-963b4a4066e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.424159] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d6af20-b95e-4ee4-ae3f-bf7f557e2a17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.450125] env[69994]: DEBUG nova.compute.manager [req-69abb500-25a4-4550-b260-5913a601f1ee req-dfff213a-d099-4188-beb7-d308c892159a service nova] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Detach interface failed, port_id=18df8f55-9b6c-4093-a622-a8129ca51490, reason: Instance 4c949f32-d395-4bcb-b998-d2f4a7741d00 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1427.890106] env[69994]: INFO nova.compute.manager [-] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Took 1.64 seconds to deallocate network for instance. 
[ 1428.399065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.399393] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1428.399649] env[69994]: DEBUG nova.objects.instance [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'resources' on Instance uuid 4c949f32-d395-4bcb-b998-d2f4a7741d00 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1428.944794] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a9a5e3-22b0-4760-bb9e-ab59fb681017 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.952368] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77890e34-f10c-44da-b1d3-2f82d78e83a1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.982880] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70be31b-7aea-4ca1-9047-4b2323a6097c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.989444] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb58221a-731a-4b82-937b-0165b174e714 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.002015] env[69994]: DEBUG nova.compute.provider_tree [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1429.504850] env[69994]: DEBUG nova.scheduler.client.report [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1430.009245] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 
tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1430.026070] env[69994]: INFO nova.scheduler.client.report [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted allocations for instance 4c949f32-d395-4bcb-b998-d2f4a7741d00 [ 1430.533954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-9e4bf00c-9686-472a-9256-36864cca9a90 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "4c949f32-d395-4bcb-b998-d2f4a7741d00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.893s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.286028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "902200b2-f2ca-4979-961a-ec046d22d05c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.286028] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "902200b2-f2ca-4979-961a-ec046d22d05c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.286217] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.286457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.286534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "902200b2-f2ca-4979-961a-ec046d22d05c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.288762] env[69994]: INFO nova.compute.manager [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 
tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Terminating instance [ 1431.793454] env[69994]: DEBUG nova.compute.manager [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1431.793454] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1431.793885] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cc0b87-1d62-4ef9-8ed9-2375a06a7692 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.801626] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1431.801848] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f136dfd8-f3a5-416f-882f-ad03bd47d681 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.807809] env[69994]: DEBUG oslo_vmware.api [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1431.807809] env[69994]: value = "task-3242972" [ 1431.807809] env[69994]: _type = "Task" [ 1431.807809] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.816490] env[69994]: DEBUG oslo_vmware.api [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.317074] env[69994]: DEBUG oslo_vmware.api [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242972, 'name': PowerOffVM_Task, 'duration_secs': 0.222118} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.317293] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1432.317490] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1432.317736] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e03001a-3960-47db-870d-fb0fe966e257 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.389561] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1432.389790] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1432.389978] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleting the datastore file [datastore2] 902200b2-f2ca-4979-961a-ec046d22d05c {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1432.390274] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d998b3b2-7d0c-4fe6-8a76-b915c405f234 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.398018] env[69994]: DEBUG oslo_vmware.api [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for the task: (returnval){ [ 1432.398018] env[69994]: value = "task-3242974" [ 1432.398018] env[69994]: _type = "Task" [ 1432.398018] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.405196] env[69994]: DEBUG oslo_vmware.api [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242974, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.907217] env[69994]: DEBUG oslo_vmware.api [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Task: {'id': task-3242974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157031} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.907650] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.907706] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1432.907841] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1432.908027] env[69994]: INFO nova.compute.manager [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1432.908283] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1432.908473] env[69994]: DEBUG nova.compute.manager [-] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1432.908561] env[69994]: DEBUG nova.network.neutron [-] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1433.203856] env[69994]: DEBUG nova.compute.manager [req-8f4aae66-2d0a-4b06-ac5a-621205e86d7b req-4fb2fb96-0642-4304-8f78-adb47216676c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Received event network-vif-deleted-3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1433.203941] env[69994]: INFO nova.compute.manager [req-8f4aae66-2d0a-4b06-ac5a-621205e86d7b req-4fb2fb96-0642-4304-8f78-adb47216676c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Neutron deleted interface 3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5; detaching it from the instance and deleting it from the info cache [ 1433.204178] env[69994]: DEBUG nova.network.neutron [req-8f4aae66-2d0a-4b06-ac5a-621205e86d7b req-4fb2fb96-0642-4304-8f78-adb47216676c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.689452] env[69994]: DEBUG nova.network.neutron [-] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.706574] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1a13c57-d897-46b3-b9c5-6f6e527d6b36 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.716440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8762db74-2987-4b42-b23c-1f4a8b3b0768 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.739540] env[69994]: DEBUG nova.compute.manager [req-8f4aae66-2d0a-4b06-ac5a-621205e86d7b req-4fb2fb96-0642-4304-8f78-adb47216676c service nova] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Detach interface failed, port_id=3cf0f0b5-845c-4d4d-bfb9-93d6bd382cb5, reason: Instance 902200b2-f2ca-4979-961a-ec046d22d05c could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1434.191790] env[69994]: INFO nova.compute.manager [-] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Took 1.28 seconds to deallocate network for instance. 
[ 1434.697956] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.698278] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.698507] env[69994]: DEBUG nova.objects.instance [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lazy-loading 'resources' on Instance uuid 902200b2-f2ca-4979-961a-ec046d22d05c {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1435.232187] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e9592a-5f8f-40cd-9a06-7cc734f28aac {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.239875] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2732e9d5-412a-406a-9521-801bfd956cd7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.270806] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc3ce8d-6e5d-4066-96d4-f7925e38bccd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.277716] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5091ed2e-d8dd-4440-816b-2f0dcc9c1e87 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.290442] env[69994]: DEBUG nova.compute.provider_tree [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.793255] env[69994]: DEBUG nova.scheduler.client.report [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1436.298405] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 
tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1436.316501] env[69994]: INFO nova.scheduler.client.report [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Deleted allocations for instance 902200b2-f2ca-4979-961a-ec046d22d05c [ 1436.824350] env[69994]: DEBUG oslo_concurrency.lockutils [None req-d6b95b0f-8b79-48a5-8f14-6b7e7c436bb4 tempest-AttachInterfacesTestJSON-859920981 tempest-AttachInterfacesTestJSON-859920981-project-member] Lock "902200b2-f2ca-4979-961a-ec046d22d05c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.538s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1442.940872] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1442.941502] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1443.447409] env[69994]: DEBUG nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1443.972261] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1443.972543] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1443.974078] env[69994]: INFO nova.compute.claims [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1445.009278] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226abab8-709c-457c-818f-e1c3c6cdb64c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.017152] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d49fe3e-3261-40ac-a3af-dd9cd1874067 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.046125] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f248d2-9d9c-4ac5-9e44-8b881a583ca3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.052856] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf882eb4-6a73-4ace-a2c9-4ca9cd916f65 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.065260] env[69994]: DEBUG nova.compute.provider_tree [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1445.568205] env[69994]: DEBUG nova.scheduler.client.report [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1446.073529] env[69994]: DEBUG oslo_concurrency.lockutils 
[None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1446.074045] env[69994]: DEBUG nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1446.146198] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1446.146380] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Cleaning up deleted instances {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1446.579227] env[69994]: DEBUG nova.compute.utils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1446.580593] env[69994]: DEBUG nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1446.580771] env[69994]: DEBUG nova.network.neutron [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1446.623276] env[69994]: DEBUG nova.policy [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4eba6562c3a41d0bfb7aeb393a600b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7bbd48e22345cc9c3f09a574143d7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1446.654986] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] There are 29 instances to clean {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1446.655200] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: a988046b-69c5-410b-8126-398e3a1c5960] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1446.870644] env[69994]: DEBUG nova.network.neutron [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Successfully created port: 7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.085635] env[69994]: DEBUG nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1447.160290] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 12a3162f-161e-4dfb-abd6-c77ce9e1785e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1447.663699] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 4c949f32-d395-4bcb-b998-d2f4a7741d00] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1448.095714] env[69994]: DEBUG nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1448.122039] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1448.122297] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1448.122456] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1448.122639] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1448.122784] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1448.122928] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1448.123146] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1448.123304] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1448.123468] env[69994]: DEBUG 
nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1448.123627] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1448.123794] env[69994]: DEBUG nova.virt.hardware [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1448.124673] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ed2aee-9374-4627-939f-c6bd19d4f080 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.132137] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28007416-98bd-4e64-8d7f-660b4b80156d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.166380] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 3c433e3b-4c16-4cfc-a7d5-40e40b0906b3] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1448.221446] env[69994]: DEBUG nova.compute.manager [req-dc0acdd9-9ce0-4092-a834-8a29214888b7 req-e5f54d3f-dfa8-4df3-b7d1-2e6d306129e2 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Received event network-vif-plugged-7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1448.221672] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc0acdd9-9ce0-4092-a834-8a29214888b7 req-e5f54d3f-dfa8-4df3-b7d1-2e6d306129e2 service nova] Acquiring lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1448.221875] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc0acdd9-9ce0-4092-a834-8a29214888b7 req-e5f54d3f-dfa8-4df3-b7d1-2e6d306129e2 service nova] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1448.222053] env[69994]: DEBUG oslo_concurrency.lockutils [req-dc0acdd9-9ce0-4092-a834-8a29214888b7 req-e5f54d3f-dfa8-4df3-b7d1-2e6d306129e2 service nova] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1448.222216] env[69994]: DEBUG nova.compute.manager [req-dc0acdd9-9ce0-4092-a834-8a29214888b7 req-e5f54d3f-dfa8-4df3-b7d1-2e6d306129e2 service 
nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] No waiting events found dispatching network-vif-plugged-7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1448.222405] env[69994]: WARNING nova.compute.manager [req-dc0acdd9-9ce0-4092-a834-8a29214888b7 req-e5f54d3f-dfa8-4df3-b7d1-2e6d306129e2 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Received unexpected event network-vif-plugged-7d02dd51-d5ca-4bf6-babe-300c991fc6bf for instance with vm_state building and task_state spawning. [ 1448.302579] env[69994]: DEBUG nova.network.neutron [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Successfully updated port: 7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1448.670066] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 902200b2-f2ca-4979-961a-ec046d22d05c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1448.805444] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.805629] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1448.805745] env[69994]: DEBUG nova.network.neutron [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.173222] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 4f002725-3f15-4d10-a7ee-07132faf6266] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1449.336308] env[69994]: DEBUG nova.network.neutron [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1449.453402] env[69994]: DEBUG nova.network.neutron [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updating instance_info_cache with network_info: [{"id": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "address": "fa:16:3e:b9:75:29", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d02dd51-d5", "ovs_interfaceid": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.677068] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e8c3effc-9430-433f-bf88-b3904cfaa31f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1449.955895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1449.956199] env[69994]: DEBUG nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Instance network_info: |[{"id": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "address": "fa:16:3e:b9:75:29", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap7d02dd51-d5", "ovs_interfaceid": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1449.956636] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:75:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d02dd51-d5ca-4bf6-babe-300c991fc6bf', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1449.963959] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Creating folder: Project (8a7bbd48e22345cc9c3f09a574143d7f). Parent ref: group-v647729. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1449.964533] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff2309e8-dc2f-4b19-b436-1a0b5120b6fe {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.976298] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Created folder: Project (8a7bbd48e22345cc9c3f09a574143d7f) in parent group-v647729. [ 1449.976471] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Creating folder: Instances. Parent ref: group-v648070. {{(pid=69994) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1449.976682] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8eb260fb-43fe-47ee-8ff5-65e9341cfceb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.985229] env[69994]: INFO nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Created folder: Instances in parent group-v648070. [ 1449.985438] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1449.985610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1449.985790] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e33e4fa-a24a-479e-9c84-d8519b83b621 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.003359] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1450.003359] env[69994]: value = "task-3242977" [ 1450.003359] env[69994]: _type = "Task" [ 1450.003359] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.010045] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242977, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.180975] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 7e92935f-fc1f-4893-8f69-4b97e4729a7f] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1450.247959] env[69994]: DEBUG nova.compute.manager [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Received event network-changed-7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1450.248058] env[69994]: DEBUG nova.compute.manager [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Refreshing instance network info cache due to event network-changed-7d02dd51-d5ca-4bf6-babe-300c991fc6bf. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1450.248287] env[69994]: DEBUG oslo_concurrency.lockutils [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] Acquiring lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.248494] env[69994]: DEBUG oslo_concurrency.lockutils [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] Acquired lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1450.248708] env[69994]: DEBUG nova.network.neutron [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Refreshing network info cache for port 7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.513344] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242977, 'name': CreateVM_Task, 'duration_secs': 0.289953} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.513527] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1450.514138] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.514304] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1450.514674] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1450.514886] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21e97158-5a4f-4af8-aba7-fdd91f15cc50 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.519370] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1450.519370] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5269ad74-1273-dfd3-a42e-e72f5cb66854" [ 1450.519370] env[69994]: _type = "Task" [ 1450.519370] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.526799] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5269ad74-1273-dfd3-a42e-e72f5cb66854, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.684584] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 16f60d87-180a-4e23-9d4b-960220489d33] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1450.961827] env[69994]: DEBUG nova.network.neutron [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updated VIF entry in instance network info cache for port 7d02dd51-d5ca-4bf6-babe-300c991fc6bf. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1450.962203] env[69994]: DEBUG nova.network.neutron [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updating instance_info_cache with network_info: [{"id": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "address": "fa:16:3e:b9:75:29", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d02dd51-d5", "ovs_interfaceid": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.029711] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5269ad74-1273-dfd3-a42e-e72f5cb66854, 'name': SearchDatastore_Task, 'duration_secs': 0.009244} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.029711] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1451.029915] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1451.030159] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.030307] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1451.030751] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1451.030751] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6923f87-9fab-4a14-9f7f-0f90b10c5c68 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.038801] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1451.038979] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1451.039668] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eeb3180-0781-4606-bb73-bec8708811bd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.044631] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1451.044631] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]521cde22-7366-0af2-903b-4d1493997c30" [ 1451.044631] env[69994]: _type = "Task" [ 1451.044631] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.051515] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521cde22-7366-0af2-903b-4d1493997c30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.188350] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 71ee4730-f0e5-4c71-8053-be9e73b702a4] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1451.464527] env[69994]: DEBUG oslo_concurrency.lockutils [req-a5a45f74-a4b8-4306-b3b7-539276089159 req-75ea5248-b4a8-4318-9495-710fd08f7f96 service nova] Releasing lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1451.555796] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]521cde22-7366-0af2-903b-4d1493997c30, 'name': SearchDatastore_Task, 'duration_secs': 0.007999} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.556593] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79e42951-9852-4f08-a8e9-2144bedabc7a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.561351] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1451.561351] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52e83d16-69e6-30c7-22cd-aff4787bfe1d" [ 1451.561351] env[69994]: _type = "Task" [ 1451.561351] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.568418] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e83d16-69e6-30c7-22cd-aff4787bfe1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.692195] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 73288b0c-7e85-48cd-9ea1-d08a31a81c32] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1452.071783] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52e83d16-69e6-30c7-22cd-aff4787bfe1d, 'name': SearchDatastore_Task, 'duration_secs': 0.009116} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.072065] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1452.072325] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 37d54d80-9cad-4cf4-9b8f-4790bbf4664d/37d54d80-9cad-4cf4-9b8f-4790bbf4664d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1452.072586] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd8b1df0-6bb3-4eab-92d4-542e0030030c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.078865] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1452.078865] env[69994]: value = "task-3242978" [ 1452.078865] env[69994]: _type = "Task" [ 1452.078865] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.086639] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242978, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.195575] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 217bd31d-f705-4aa7-a8a7-d79e407b7c7b] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1452.588009] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448647} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.588305] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 37d54d80-9cad-4cf4-9b8f-4790bbf4664d/37d54d80-9cad-4cf4-9b8f-4790bbf4664d.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1452.588528] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1452.588785] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2022f9c4-f17b-45ef-b847-3ea797c0eca2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.595384] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1452.595384] env[69994]: value = "task-3242979" [ 1452.595384] env[69994]: _type = "Task" [ 1452.595384] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.603126] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242979, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.698865] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 1735049d-a240-48fc-a360-3b00b02225b1] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1453.105535] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059045} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.105793] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1453.106586] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4071f5-b771-4b6d-a9da-9cb37803c287 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.128277] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 37d54d80-9cad-4cf4-9b8f-4790bbf4664d/37d54d80-9cad-4cf4-9b8f-4790bbf4664d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1453.128524] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06774b8f-fa75-4203-8e80-794c54779d4a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.146949] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1453.146949] env[69994]: value = "task-3242980" [ 1453.146949] env[69994]: _type = "Task" [ 1453.146949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.154065] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242980, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.202767] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: b843a484-0ebd-49ca-a1b6-1c4c0ae9fdff] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1453.657071] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242980, 'name': ReconfigVM_Task, 'duration_secs': 0.301599} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.657350] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 37d54d80-9cad-4cf4-9b8f-4790bbf4664d/37d54d80-9cad-4cf4-9b8f-4790bbf4664d.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1453.657985] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a142e024-4145-49c4-aae7-20ee3e3fae5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.664410] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1453.664410] env[69994]: value = "task-3242981" [ 1453.664410] env[69994]: _type = "Task" [ 1453.664410] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.672470] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242981, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.706114] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 6b73608e-b62f-4292-870c-51f1c686e569] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1454.173747] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242981, 'name': Rename_Task, 'duration_secs': 0.133652} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.174030] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1454.174268] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13159fae-7087-43da-b123-daefce24fe0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.180017] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1454.180017] env[69994]: value = "task-3242982" [ 1454.180017] env[69994]: _type = "Task" [ 1454.180017] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.187174] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.210597] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: eea243fb-97fc-4c65-8699-1b3c321bd250] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1454.689956] env[69994]: DEBUG oslo_vmware.api [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242982, 'name': PowerOnVM_Task, 'duration_secs': 0.439685} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.690224] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.690428] env[69994]: INFO nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Took 6.59 seconds to spawn the instance on the hypervisor. [ 1454.690603] env[69994]: DEBUG nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1454.691352] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0383314d-aa39-4003-bcd8-2b7ab6606b4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.716439] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 70fcf5b1-213f-4ff9-b675-282e7aa30e20] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1455.208225] env[69994]: INFO nova.compute.manager [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Took 11.26 seconds to build instance. 
[ 1455.219883] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 4eb3724f-35ac-4f8d-a742-561b9c0333d8] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1455.710323] env[69994]: DEBUG oslo_concurrency.lockutils [None req-cb379b53-ad11-42d5-b3f6-6819299f5ad3 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.769s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1455.722951] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 90e411dd-26f3-421d-b2d0-620c61fe8476] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1455.831181] env[69994]: DEBUG nova.compute.manager [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Received event network-changed-7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1455.832042] env[69994]: DEBUG nova.compute.manager [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Refreshing instance network info cache due to event network-changed-7d02dd51-d5ca-4bf6-babe-300c991fc6bf. {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1455.832042] env[69994]: DEBUG oslo_concurrency.lockutils [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] Acquiring lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.832042] env[69994]: DEBUG oslo_concurrency.lockutils [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] Acquired lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1455.832042] env[69994]: DEBUG nova.network.neutron [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Refreshing network info cache for port 7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1456.226339] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: d31f167f-8248-4aef-aa3c-6bc7259e1a80] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1456.526627] env[69994]: DEBUG nova.network.neutron [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updated VIF entry in instance network info cache for port 7d02dd51-d5ca-4bf6-babe-300c991fc6bf. 
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1456.526961] env[69994]: DEBUG nova.network.neutron [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updating instance_info_cache with network_info: [{"id": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "address": "fa:16:3e:b9:75:29", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d02dd51-d5", "ovs_interfaceid": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.730064] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 03e58b14-12fe-46e5-b483-4176d5a43c0e] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1457.029745] env[69994]: DEBUG oslo_concurrency.lockutils [req-08b6fc4d-959b-4809-8da4-05d20c001d09 req-e4b0c32f-500b-4001-bcd5-1934b3acf7ab service nova] Releasing lock "refresh_cache-37d54d80-9cad-4cf4-9b8f-4790bbf4664d" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1457.234024] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: 65facb63-1323-4905-b107-a5c5782d4a4c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1457.737819] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ca237467-eafc-4c18-a56e-98b94d111c92] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1458.242374] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: be421d40-9859-4e0d-aef8-a2feb8717a78] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1458.746809] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: d8d2958c-e44c-4796-becc-c572057f7ba5] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1459.249957] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None 
None] [instance: 19fedc80-8def-426a-af73-ad871e127e02] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1459.753382] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: e6acdc45-5e8f-4ff0-9259-3de73a6fdd14] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1460.256569] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: ef37ce64-2c26-4080-899a-6d9dbb5850c9] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1460.759731] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: eff21ec5-a51d-4004-9edf-1891f706fe9c] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1461.263537] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.263950] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Cleaning up deleted instances with incomplete migration {{(pid=69994) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1461.766551] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.268939] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.269395] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.269395] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.269483] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.269634] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.269785] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task 
ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.269931] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.270083] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1463.270230] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.774058] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1463.774356] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1463.774533] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1463.774692] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1463.775657] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16543e2f-388e-4ab0-86d2-e6ec3f4b8b0a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.784211] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567f10ab-8465-48e8-8c8a-7196070089c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.797976] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43e5dd4-5d43-43cf-8d20-da8731dc9546 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.803788] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d40219-c284-4d95-8fd9-b45c9a847303 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.833020] env[69994]: DEBUG nova.compute.resource_tracker [None 
req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179973MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1463.833198] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1463.833385] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1464.922649] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 37d54d80-9cad-4cf4-9b8f-4790bbf4664d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1464.922913] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1464.923075] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1464.948161] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117ac38b-2a58-475b-bdc3-e966a6a293d9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.955424] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51197cb3-59b5-4f0d-89d4-93c9d612af5e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.983869] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388de5cf-c854-41e2-9b2c-5707066131ca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.990531] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bec31de-8782-4199-b5fb-691321690266 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.004390] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.507267] env[69994]: DEBUG nova.scheduler.client.report [None 
req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1466.013123] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1466.013552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.180s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1495.029024] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1495.029350] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1495.532656] env[69994]: DEBUG nova.compute.utils [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1496.036325] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1497.094552] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1497.096206] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 
tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1497.096206] env[69994]: INFO nova.compute.manager [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Attaching volume 23f63f26-2383-4b45-8ba9-421f266b1b57 to /dev/sdb [ 1497.125339] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285c5d95-961f-40e1-baa9-7c7d345be7f0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.132578] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89930eb8-ec34-4a0a-b2dd-16fb24f1ecc3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.145019] env[69994]: DEBUG nova.virt.block_device [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updating existing volume attachment record: e025243e-4a1a-4862-b43a-ff9dd639e24f {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1502.188239] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1502.188523] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648074', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'name': 'volume-23f63f26-2383-4b45-8ba9-421f266b1b57', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '37d54d80-9cad-4cf4-9b8f-4790bbf4664d', 'attached_at': '', 'detached_at': '', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'serial': '23f63f26-2383-4b45-8ba9-421f266b1b57'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1502.189391] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa01b60-7ec4-457f-b843-d9af012aac03 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.206597] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5627c2c-4ab0-418f-af4a-9d67e4458d69 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.229933] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] volume-23f63f26-2383-4b45-8ba9-421f266b1b57/volume-23f63f26-2383-4b45-8ba9-421f266b1b57.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1502.230169] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a4c8d2b-cc2e-44c2-8988-59cbaaad705d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.247303] env[69994]: DEBUG oslo_vmware.api [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1502.247303] env[69994]: value = "task-3242987" [ 1502.247303] env[69994]: _type = "Task" [ 1502.247303] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.254263] env[69994]: DEBUG oslo_vmware.api [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242987, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.757759] env[69994]: DEBUG oslo_vmware.api [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242987, 'name': ReconfigVM_Task, 'duration_secs': 0.346031} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.758033] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Reconfigured VM instance instance-0000007c to attach disk [datastore2] volume-23f63f26-2383-4b45-8ba9-421f266b1b57/volume-23f63f26-2383-4b45-8ba9-421f266b1b57.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1502.762615] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8eb9e19f-3c36-4cba-a987-e3baf7887321 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.776870] env[69994]: DEBUG oslo_vmware.api [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1502.776870] env[69994]: value = "task-3242988" [ 1502.776870] env[69994]: _type = "Task" [ 1502.776870] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.783925] env[69994]: DEBUG oslo_vmware.api [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.286466] env[69994]: DEBUG oslo_vmware.api [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242988, 'name': ReconfigVM_Task, 'duration_secs': 0.140531} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.286843] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648074', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'name': 'volume-23f63f26-2383-4b45-8ba9-421f266b1b57', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '37d54d80-9cad-4cf4-9b8f-4790bbf4664d', 'attached_at': '', 'detached_at': '', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'serial': '23f63f26-2383-4b45-8ba9-421f266b1b57'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1504.340567] env[69994]: DEBUG nova.objects.instance [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'flavor' on Instance uuid 37d54d80-9cad-4cf4-9b8f-4790bbf4664d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.848066] env[69994]: DEBUG oslo_concurrency.lockutils [None req-4a93ffe0-fa16-485e-bd66-763d5b924ee0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.753s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1506.398680] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "be58b2df-3231-418d-8772-f377eb27f878" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1506.399076] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "be58b2df-3231-418d-8772-f377eb27f878" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1506.901014] env[69994]: DEBUG nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1507.423161] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1507.423595] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1507.425894] env[69994]: INFO nova.compute.claims [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1508.470965] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3a4678-b9ee-4987-a69e-7e1ca3b27454 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.478793] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3168f4bd-8b59-44ee-927a-817a8e99c794 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.508279] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9becd8-a4f8-49a2-b802-b967f914d6f7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.515089] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18aa1a25-40ab-4d61-a24f-9546c42a8af1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.528506] env[69994]: DEBUG nova.compute.provider_tree [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.032053] env[69994]: DEBUG nova.scheduler.client.report [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1509.538105] env[69994]: DEBUG oslo_concurrency.lockutils 
[None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.114s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1509.538658] env[69994]: DEBUG nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1510.043334] env[69994]: DEBUG nova.compute.utils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1510.044747] env[69994]: DEBUG nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1510.044919] env[69994]: DEBUG nova.network.neutron [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1510.100726] env[69994]: DEBUG nova.policy [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4eba6562c3a41d0bfb7aeb393a600b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7bbd48e22345cc9c3f09a574143d7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1510.359148] env[69994]: DEBUG nova.network.neutron [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Successfully created port: a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1510.548721] env[69994]: DEBUG nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1511.558806] env[69994]: DEBUG nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1511.583377] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1511.583615] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1511.583775] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1511.583955] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1511.584113] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1511.584261] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1511.584461] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1511.584623] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1511.584820] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1511.585055] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1511.585302] env[69994]: DEBUG nova.virt.hardware [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1511.586212] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60ac616-b7cb-40f1-8656-8e437cf18237 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.594108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6185e7-38cd-45bd-85be-84ffd5c7058a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.754520] env[69994]: DEBUG nova.compute.manager [req-b4383076-e6b2-4db6-892c-2ad37d6f8001 req-19a804c1-39fc-4dcc-a30b-29e78daa8e32 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Received event network-vif-plugged-a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1511.754822] env[69994]: DEBUG oslo_concurrency.lockutils [req-b4383076-e6b2-4db6-892c-2ad37d6f8001 req-19a804c1-39fc-4dcc-a30b-29e78daa8e32 service nova] Acquiring lock "be58b2df-3231-418d-8772-f377eb27f878-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1511.754969] env[69994]: DEBUG oslo_concurrency.lockutils [req-b4383076-e6b2-4db6-892c-2ad37d6f8001 req-19a804c1-39fc-4dcc-a30b-29e78daa8e32 service nova] Lock "be58b2df-3231-418d-8772-f377eb27f878-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1511.755110] env[69994]: DEBUG oslo_concurrency.lockutils [req-b4383076-e6b2-4db6-892c-2ad37d6f8001 req-19a804c1-39fc-4dcc-a30b-29e78daa8e32 service nova] Lock "be58b2df-3231-418d-8772-f377eb27f878-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1511.755299] env[69994]: 
DEBUG nova.compute.manager [req-b4383076-e6b2-4db6-892c-2ad37d6f8001 req-19a804c1-39fc-4dcc-a30b-29e78daa8e32 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] No waiting events found dispatching network-vif-plugged-a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1511.755427] env[69994]: WARNING nova.compute.manager [req-b4383076-e6b2-4db6-892c-2ad37d6f8001 req-19a804c1-39fc-4dcc-a30b-29e78daa8e32 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Received unexpected event network-vif-plugged-a1097db7-be78-4408-acad-2dcef90b27c7 for instance with vm_state building and task_state spawning. [ 1511.798839] env[69994]: DEBUG nova.network.neutron [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Successfully updated port: a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1512.301710] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.301874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1512.302045] env[69994]: DEBUG nova.network.neutron [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.833152] env[69994]: DEBUG nova.network.neutron [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Instance cache missing network info. 
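The "No waiting events found" / "Received unexpected event" pair above comes from dispatching an external Neutron event for which no waiter was registered: the instance is still building, so nothing is blocked on network-vif-plugged yet. A small illustrative sketch of that dispatch shape follows; the names (EventRegistry, prepare, dispatch) are hypothetical, not Nova's.

```python
# Illustrative event registry: if nothing is waiting on an event name when it
# arrives, the dispatcher logs it as unexpected instead of unblocking a waiter.
import threading
from collections import defaultdict


class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)  # instance_uuid -> {event: threading.Event}

    def prepare(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an event before triggering the action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid: str, event_name: str):
        with self._lock:  # mirrors the "<uuid>-events" lock in the log
            return self._waiters[instance_uuid].pop(event_name, None)

    def dispatch(self, instance_uuid: str, event_name: str):
        waiter = self.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print(f"No waiting events found dispatching {event_name}")
            print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
        else:
            waiter.set()  # unblock whoever called waiter.wait()


registry = EventRegistry()
registry.dispatch("be58b2df-3231-418d-8772-f377eb27f878",
                  "network-vif-plugged-a1097db7-be78-4408-acad-2dcef90b27c7")
```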
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1512.945975] env[69994]: DEBUG nova.network.neutron [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Updating instance_info_cache with network_info: [{"id": "a1097db7-be78-4408-acad-2dcef90b27c7", "address": "fa:16:3e:ea:9b:ea", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1097db7-be", "ovs_interfaceid": "a1097db7-be78-4408-acad-2dcef90b27c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.448633] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1513.448992] env[69994]: DEBUG nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Instance network_info: |[{"id": "a1097db7-be78-4408-acad-2dcef90b27c7", "address": "fa:16:3e:ea:9b:ea", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1097db7-be", "ovs_interfaceid": "a1097db7-be78-4408-acad-2dcef90b27c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1513.449434] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:9b:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1097db7-be78-4408-acad-2dcef90b27c7', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1513.456840] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1513.457055] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be58b2df-3231-418d-8772-f377eb27f878] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1513.457280] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caea5ab6-be57-42a6-984a-2c13ffe99840 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.476869] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1513.476869] env[69994]: value = "task-3242989" [ 1513.476869] env[69994]: _type = "Task" [ 1513.476869] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.486369] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242989, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.782362] env[69994]: DEBUG nova.compute.manager [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Received event network-changed-a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1513.782533] env[69994]: DEBUG nova.compute.manager [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Refreshing instance network info cache due to event network-changed-a1097db7-be78-4408-acad-2dcef90b27c7. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1513.782748] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] Acquiring lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.782892] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] Acquired lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1513.783063] env[69994]: DEBUG nova.network.neutron [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Refreshing network info cache for port a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1513.884684] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.885065] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.986383] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3242989, 'name': CreateVM_Task, 'duration_secs': 0.29967} completed successfully. 
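CreateVM_Task above follows the usual pattern in this trace: invoke a vCenter task, poll its progress, and report duration_secs when it completes. The sketch below shows that polling loop in a self-contained form; get_task_info is a hypothetical stand-in for the vSphere call oslo.vmware actually makes, not a real API.

```python
# Illustrative polling loop in the shape of the "Waiting for the task ...
# progress is N% ... completed successfully" sequences in this log.
import time


def wait_for_task(task_id, get_task_info, poll_interval=0.1):
    """Poll a task dict until it reports success, echoing progress like the log."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)  # -> {'name': ..., 'state': ..., 'progress': ...}
        if info["state"] == "success":
            duration = round(time.monotonic() - start, 6)
            print(f"Task: {{'id': {task_id}, 'name': {info['name']}, "
                  f"'duration_secs': {duration}}} completed successfully.")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task: {{'id': {task_id}, 'name': {info['name']}}} "
              f"progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)


# Fake task source that succeeds on the second poll:
_states = iter([
    {"name": "CreateVM_Task", "state": "running", "progress": 0},
    {"name": "CreateVM_Task", "state": "success"},
])
wait_for_task("task-3242989", lambda _id: next(_states))
```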
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.986599] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be58b2df-3231-418d-8772-f377eb27f878] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1513.987216] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.987383] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1513.987718] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1513.987963] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-633bd0b0-3f22-4de3-9d17-6ebfaf60fae5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.992228] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1513.992228] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52456b45-65e9-2451-dd0d-7199a9f30b50" [ 1513.992228] env[69994]: _type = "Task" [ 1513.992228] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.999641] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52456b45-65e9-2451-dd0d-7199a9f30b50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.390681] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.392025] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.392025] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.392025] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.392025] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.392025] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1514.392568] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.502438] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52456b45-65e9-2451-dd0d-7199a9f30b50, 'name': SearchDatastore_Task, 'duration_secs': 0.010428} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.502721] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1514.502947] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1514.503195] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.503341] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1514.503519] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1514.503767] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-860b187b-a01c-43fd-83ce-edf72490d620 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.512943] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1514.513144] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1514.514358] env[69994]: DEBUG nova.network.neutron [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Updated VIF entry in instance network info cache for port a1097db7-be78-4408-acad-2dcef90b27c7. 
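The "Updated VIF entry in instance network info cache" line above amounts to replacing one port's entry in the cached network_info list while leaving the other entries untouched. Here is a sketch of that merge, mirroring what the log describes rather than Nova's actual cache code.

```python
# Replace (or append) the VIF entry whose "id" matches the changed port.
def update_vif_entry(network_info: list[dict], updated_vif: dict) -> list[dict]:
    refreshed = []
    replaced = False
    for vif in network_info:
        if vif["id"] == updated_vif["id"]:
            refreshed.append(updated_vif)   # "Updated VIF entry ... for port <id>"
            replaced = True
        else:
            refreshed.append(vif)
    if not replaced:
        refreshed.append(updated_vif)       # port not cached yet: add it
    return refreshed


cache = [{"id": "a1097db7-be78-4408-acad-2dcef90b27c7", "active": False}]
cache = update_vif_entry(cache, {"id": "a1097db7-be78-4408-acad-2dcef90b27c7",
                                 "active": True})
```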
{{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1514.514695] env[69994]: DEBUG nova.network.neutron [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Updating instance_info_cache with network_info: [{"id": "a1097db7-be78-4408-acad-2dcef90b27c7", "address": "fa:16:3e:ea:9b:ea", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1097db7-be", "ovs_interfaceid": "a1097db7-be78-4408-acad-2dcef90b27c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.515743] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c5e130d-fa7f-488a-a4ef-dd8ccf21701d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.521744] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1514.521744] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52fd75d8-db14-4cd9-f869-878bd9d5ec9e" [ 1514.521744] env[69994]: _type = "Task" [ 1514.521744] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.529387] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fd75d8-db14-4cd9-f869-878bd9d5ec9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.896130] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1514.896492] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1514.896557] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1514.896686] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1514.897554] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b0762a-1626-4b54-b65c-905cb6c064a7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.905468] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b745b83a-ce7d-421d-be5a-6b12350b4037 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.918817] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9268cbd-1c7f-4017-955e-abf6eac75ed2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.925101] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f1b064-f259-4765-b9f4-e96efc5ae67b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.954722] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180583MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1514.954868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1514.955080] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1515.019438] env[69994]: DEBUG oslo_concurrency.lockutils [req-ffdd0fe0-12e7-413e-894b-fa8b6b84b2b5 req-45338cb9-3411-4071-8b9a-79f3da4e139f service nova] Releasing lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1515.030696] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52fd75d8-db14-4cd9-f869-878bd9d5ec9e, 'name': SearchDatastore_Task, 'duration_secs': 0.009197} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.031443] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3609836a-0c08-445d-bde7-7b64508748e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.036428] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1515.036428] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]5212ed0b-5fdc-1d00-23ee-d3079270ff85" [ 1515.036428] env[69994]: _type = "Task" [ 1515.036428] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.044200] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5212ed0b-5fdc-1d00-23ee-d3079270ff85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.546653] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]5212ed0b-5fdc-1d00-23ee-d3079270ff85, 'name': SearchDatastore_Task, 'duration_secs': 0.009253} completed successfully. 
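The SearchDatastore_Task calls above locate the cached image vmdk before the CopyVirtualDisk_Task that follows duplicates it into the instance's own folder. A sketch of how the two datastore paths seen in this trace are composed (the helper names are illustrative, not Nova's):

```python
# Cached image disks are named after the Glance image id; the copy target is
# named after the instance uuid, both on the same datastore.
def cached_image_path(datastore: str, image_id: str) -> str:
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"


def instance_disk_path(datastore: str, instance_uuid: str) -> str:
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


src = cached_image_path("datastore2", "cc2e14cc-b12f-480a-a387-dd21e9efda8b")
dst = instance_disk_path("datastore2", "be58b2df-3231-418d-8772-f377eb27f878")
print(f"Copying Virtual Disk {src} to {dst}")
```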
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.546921] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1515.547197] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] be58b2df-3231-418d-8772-f377eb27f878/be58b2df-3231-418d-8772-f377eb27f878.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1515.547451] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9f5b140-1d8a-44c7-9021-0d1e27afd1b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.554586] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1515.554586] env[69994]: value = "task-3242990" [ 1515.554586] env[69994]: _type = "Task" [ 1515.554586] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.561995] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.982058] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 37d54d80-9cad-4cf4-9b8f-4790bbf4664d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1515.982380] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance be58b2df-3231-418d-8772-f377eb27f878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1515.982446] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1515.982608] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1516.019954] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e78c0ab-6847-4e1c-906c-37174c2947f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.027040] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a74943-954b-466c-a45f-438447a559b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.059467] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ffde9f-5c90-4659-8dc3-807c84369841 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.069028] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3a27cc-0aed-4988-a5ad-52c97d279707 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.072528] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43524} completed successfully. 
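The "Final resource view" figures above follow directly from the two instances the tracker reports as actively managed, plus the host memory reservation. A worked check, assuming the 512 MB reservation that also appears as 'reserved': 512 in the MEMORY_MB inventory reported just below:

```python
# Recompute used_ram/used_disk/used_vcpus from the per-instance allocations.
instances = [
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},   # 37d54d80-9cad-...
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},   # be58b2df-3231-...
]
reserved_ram_mb = 512  # host memory reservation assumed from the inventory data

used_ram_mb = reserved_ram_mb + sum(i["MEMORY_MB"] for i in instances)
used_disk_gb = sum(i["DISK_GB"] for i in instances)
used_vcpus = sum(i["VCPU"] for i in instances)

# Matches "used_ram=896MB ... used_disk=2GB ... used_vcpus=2" above.
assert (used_ram_mb, used_disk_gb, used_vcpus) == (896, 2, 2)
```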
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.072767] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] be58b2df-3231-418d-8772-f377eb27f878/be58b2df-3231-418d-8772-f377eb27f878.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1516.072970] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1516.073537] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f2df3cd-abe7-4fc1-9ae7-c695cc494297 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.082729] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1516.088343] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1516.088343] env[69994]: value = "task-3242991" [ 1516.088343] env[69994]: _type = "Task" [ 1516.088343] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.097124] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.586378] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1516.598020] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058665} completed successfully. 
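The "Extending root virtual disk to 1048576" message above is consistent with the m1.nano flavor's root_gb=1 expressed in KiB, the unit the vSphere extend-disk request works in. A one-line check:

```python
# root_gb from the flavor, converted to KiB for the extend request.
root_gb = 1                                # m1.nano: root_gb=1
requested_size_kb = root_gb * 1024 * 1024
assert requested_size_kb == 1048576
```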
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.598307] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1516.599022] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a8b404-eaae-48d4-a82b-75c312a5eee2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.621432] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] be58b2df-3231-418d-8772-f377eb27f878/be58b2df-3231-418d-8772-f377eb27f878.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1516.621954] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e86ca03-0e55-4430-a249-2cace1866c5d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.642326] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1516.642326] env[69994]: value = "task-3242992" [ 1516.642326] env[69994]: _type = "Task" [ 1516.642326] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.649918] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242992, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.093318] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1517.093752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.138s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1517.151672] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242992, 'name': ReconfigVM_Task, 'duration_secs': 0.266203} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.151936] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Reconfigured VM instance instance-0000007d to attach disk [datastore2] be58b2df-3231-418d-8772-f377eb27f878/be58b2df-3231-418d-8772-f377eb27f878.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1517.152557] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89223a2a-66a4-4c5a-a261-5f9332d9fc72 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.158583] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1517.158583] env[69994]: value = "task-3242993" [ 1517.158583] env[69994]: _type = "Task" [ 1517.158583] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.166029] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242993, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.670065] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242993, 'name': Rename_Task, 'duration_secs': 0.131735} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.670347] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1517.670591] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f283a411-5bee-4061-bbf7-15aaba5e0e6e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.676174] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1517.676174] env[69994]: value = "task-3242994" [ 1517.676174] env[69994]: _type = "Task" [ 1517.676174] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.684489] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242994, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.848324] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1518.186355] env[69994]: DEBUG oslo_vmware.api [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242994, 'name': PowerOnVM_Task, 'duration_secs': 0.446464} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.186776] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1518.186840] env[69994]: INFO nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Took 6.63 seconds to spawn the instance on the hypervisor. [ 1518.187086] env[69994]: DEBUG nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1518.188737] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d9d45e-5cb4-4e7c-bab1-205201243eb3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.707558] env[69994]: INFO nova.compute.manager [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Took 11.30 seconds to build instance. [ 1519.210112] env[69994]: DEBUG oslo_concurrency.lockutils [None req-59c2fe63-b406-4e1a-9244-03070764f1a5 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "be58b2df-3231-418d-8772-f377eb27f878" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.811s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1519.435437] env[69994]: DEBUG nova.compute.manager [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Received event network-changed-a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1519.435437] env[69994]: DEBUG nova.compute.manager [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Refreshing instance network info cache due to event network-changed-a1097db7-be78-4408-acad-2dcef90b27c7. 
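The "Took 6.63 seconds to spawn the instance on the hypervisor" figure above can be recovered, to rounding, from the timestamps in this trace: spawning started around t=1511.56 and the spawn-complete message was logged around t=1518.19.

```python
# Worked check of the spawn duration using approximate log timestamps.
spawn_started = 1511.56    # "Start spawning the instance on the hypervisor."
spawn_finished = 1518.19   # spawn-complete INFO message
print(round(spawn_finished - spawn_started, 2))  # 6.63
```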
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1519.435437] env[69994]: DEBUG oslo_concurrency.lockutils [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] Acquiring lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.435437] env[69994]: DEBUG oslo_concurrency.lockutils [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] Acquired lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1519.435437] env[69994]: DEBUG nova.network.neutron [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Refreshing network info cache for port a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1520.122177] env[69994]: DEBUG nova.network.neutron [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Updated VIF entry in instance network info cache for port a1097db7-be78-4408-acad-2dcef90b27c7. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1520.122532] env[69994]: DEBUG nova.network.neutron [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Updating instance_info_cache with network_info: [{"id": "a1097db7-be78-4408-acad-2dcef90b27c7", "address": "fa:16:3e:ea:9b:ea", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1097db7-be", "ovs_interfaceid": "a1097db7-be78-4408-acad-2dcef90b27c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.625072] env[69994]: DEBUG oslo_concurrency.lockutils [req-172f8269-eccf-43c6-ac0f-9a61976d058a req-6e873c4a-2193-4152-a2ab-dd3d9babebe0 service nova] Releasing lock "refresh_cache-be58b2df-3231-418d-8772-f377eb27f878" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1557.182342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 
tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "be58b2df-3231-418d-8772-f377eb27f878" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.182750] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "be58b2df-3231-418d-8772-f377eb27f878" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.182823] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "be58b2df-3231-418d-8772-f377eb27f878-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1557.182975] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "be58b2df-3231-418d-8772-f377eb27f878-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1557.183188] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "be58b2df-3231-418d-8772-f377eb27f878-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1557.185355] env[69994]: INFO nova.compute.manager [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Terminating instance [ 1557.689394] env[69994]: DEBUG nova.compute.manager [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1557.689656] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1557.690664] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2ebe9a-6895-432d-bf8e-79d7edaebef0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.698912] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1557.699163] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5aeabcd0-f3b0-4a9e-a35a-c5f50f5c15d0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.705831] env[69994]: DEBUG oslo_vmware.api [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1557.705831] env[69994]: value = "task-3242995" [ 1557.705831] env[69994]: _type = "Task" [ 1557.705831] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.714616] env[69994]: DEBUG oslo_vmware.api [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.215825] env[69994]: DEBUG oslo_vmware.api [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242995, 'name': PowerOffVM_Task, 'duration_secs': 0.19573} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.216272] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1558.216466] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1558.216757] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-662f15e7-c6fa-4375-b02c-c53bc7163432 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.277455] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1558.277691] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1558.277856] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleting the datastore file [datastore2] be58b2df-3231-418d-8772-f377eb27f878 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1558.278153] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ec8eef0-55ba-4fce-a04d-30d2190eee1b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.284132] env[69994]: DEBUG oslo_vmware.api [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1558.284132] env[69994]: value = "task-3242997" [ 1558.284132] env[69994]: _type = "Task" [ 1558.284132] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.291308] env[69994]: DEBUG oslo_vmware.api [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242997, 'name': DeleteDatastoreFile_Task} progress is 0%. 
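
The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow the same shape: submit a vCenter task, then poll it until it completes, logging its progress along the way. A small poll-until-done sketch using oslo.service's FixedIntervalLoopingCall is below; the session object and its get_task_state() helper are assumptions for illustration, not the oslo.vmware API surface.

from oslo_service import loopingcall

def wait_for_task(session, task_ref, interval=0.5):
    def _poll():
        # get_task_state() is a hypothetical helper returning (state, progress).
        state, progress = session.get_task_state(task_ref)
        if state == "success":
            # Raising LoopingCallDone stops the timer; its retvalue is what
            # .wait() returns to the caller below.
            raise loopingcall.LoopingCallDone(retvalue=task_ref)
        if state == "error":
            raise RuntimeError("task %s failed" % task_ref)
        print("Task %s progress is %s%%" % (task_ref, progress))

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()
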
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.794383] env[69994]: DEBUG oslo_vmware.api [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123892} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.794599] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1558.794783] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1558.794959] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1558.795187] env[69994]: INFO nova.compute.manager [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: be58b2df-3231-418d-8772-f377eb27f878] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1558.795431] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1558.795620] env[69994]: DEBUG nova.compute.manager [-] [instance: be58b2df-3231-418d-8772-f377eb27f878] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1558.795717] env[69994]: DEBUG nova.network.neutron [-] [instance: be58b2df-3231-418d-8772-f377eb27f878] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1559.226149] env[69994]: DEBUG nova.compute.manager [req-501eb4dc-9bd5-4372-aa2c-3a5a098b23e1 req-80c1d6a2-fc1f-456b-8c82-ec0bd5c88432 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Received event network-vif-deleted-a1097db7-be78-4408-acad-2dcef90b27c7 {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1559.226149] env[69994]: INFO nova.compute.manager [req-501eb4dc-9bd5-4372-aa2c-3a5a098b23e1 req-80c1d6a2-fc1f-456b-8c82-ec0bd5c88432 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Neutron deleted interface a1097db7-be78-4408-acad-2dcef90b27c7; detaching it from the instance and deleting it from the info cache [ 1559.226729] env[69994]: DEBUG nova.network.neutron [req-501eb4dc-9bd5-4372-aa2c-3a5a098b23e1 req-80c1d6a2-fc1f-456b-8c82-ec0bd5c88432 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.709816] env[69994]: DEBUG nova.network.neutron [-] [instance: be58b2df-3231-418d-8772-f377eb27f878] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.729239] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf21b89f-d8aa-4021-a983-ce71c312c51c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.739188] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe6cf6c-05b3-4082-a396-1a5ffd8204db {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.763366] env[69994]: DEBUG nova.compute.manager [req-501eb4dc-9bd5-4372-aa2c-3a5a098b23e1 req-80c1d6a2-fc1f-456b-8c82-ec0bd5c88432 service nova] [instance: be58b2df-3231-418d-8772-f377eb27f878] Detach interface failed, port_id=a1097db7-be78-4408-acad-2dcef90b27c7, reason: Instance be58b2df-3231-418d-8772-f377eb27f878 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1560.212943] env[69994]: INFO nova.compute.manager [-] [instance: be58b2df-3231-418d-8772-f377eb27f878] Took 1.42 seconds to deallocate network for instance. 
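
Destroying the VM is only half of the teardown: the looping-call entry above shows the manager waiting on _deallocate_network_with_retries, i.e. Neutron deallocation wrapped in a retry loop, and the later records confirm the ports were released about 1.4 seconds afterwards. A simplified stand-in for that retry wrapper, with an assumed retry budget and an injected deallocation callable, looks like this:

import time

def deallocate_network_with_retries(deallocate, instance_uuid,
                                    attempts=3, delay=1.0):
    # Keep calling the deallocation function until it succeeds or the retry
    # budget runs out; the budget and delay are illustrative values, not
    # Nova's configured ones.
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            deallocate(instance_uuid)
            return
        except Exception as exc:  # deliberately broad: any transient Neutron error
            last_exc = exc
            time.sleep(delay)
    raise last_exc
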
[ 1560.720167] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1560.720628] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1560.720876] env[69994]: DEBUG nova.objects.instance [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'resources' on Instance uuid be58b2df-3231-418d-8772-f377eb27f878 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1561.264793] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380ae439-a392-4de9-8821-94abeaadca88 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.272036] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0bafea-6910-4548-9a5a-0ecc2d51ff80 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.301029] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b1cb77-e40e-4de6-9ca3-6e3eb31a0b06 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.307567] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4de2f3-44ac-471b-9083-f14be804dd0d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.319846] env[69994]: DEBUG nova.compute.provider_tree [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.822764] env[69994]: DEBUG nova.scheduler.client.report [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1562.327844] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 
tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1562.348515] env[69994]: INFO nova.scheduler.client.report [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted allocations for instance be58b2df-3231-418d-8772-f377eb27f878 [ 1562.856752] env[69994]: DEBUG oslo_concurrency.lockutils [None req-725dbbb4-cf81-4e2b-9a2d-cefd1e432133 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "be58b2df-3231-418d-8772-f377eb27f878" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.674s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1564.173689] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1564.174084] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1564.677521] env[69994]: INFO nova.compute.manager [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Detaching volume 23f63f26-2383-4b45-8ba9-421f266b1b57 [ 1564.708111] env[69994]: INFO nova.virt.block_device [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Attempting to driver detach volume 23f63f26-2383-4b45-8ba9-421f266b1b57 from mountpoint /dev/sdb [ 1564.708346] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Volume detach. 
Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1564.708569] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648074', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'name': 'volume-23f63f26-2383-4b45-8ba9-421f266b1b57', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '37d54d80-9cad-4cf4-9b8f-4790bbf4664d', 'attached_at': '', 'detached_at': '', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'serial': '23f63f26-2383-4b45-8ba9-421f266b1b57'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1564.709467] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e050722e-bb12-4665-813f-7a600cb31816 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.730827] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089f854e-6c1d-4b10-9faa-b4520e37a93d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.737538] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bfcf8f-a9e5-490b-a22a-cb7bc51ac651 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.756837] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a585fd-13ec-4d70-b13c-dd043fe14100 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.770605] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] The volume has not been displaced from its original location: [datastore2] volume-23f63f26-2383-4b45-8ba9-421f266b1b57/volume-23f63f26-2383-4b45-8ba9-421f266b1b57.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1564.775738] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1564.775993] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7534e5ac-1573-4469-bf1c-36b74b006928 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.793687] env[69994]: DEBUG oslo_vmware.api [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1564.793687] env[69994]: value = "task-3242998" [ 1564.793687] env[69994]: _type = "Task" [ 1564.793687] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.801095] env[69994]: DEBUG oslo_vmware.api [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242998, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.302740] env[69994]: DEBUG oslo_vmware.api [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242998, 'name': ReconfigVM_Task, 'duration_secs': 0.222602} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.303219] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1565.307536] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b559249-0912-4c93-a39e-9bddb5e2c355 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.322056] env[69994]: DEBUG oslo_vmware.api [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1565.322056] env[69994]: value = "task-3242999" [ 1565.322056] env[69994]: _type = "Task" [ 1565.322056] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.330940] env[69994]: DEBUG oslo_vmware.api [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242999, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.832230] env[69994]: DEBUG oslo_vmware.api [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3242999, 'name': ReconfigVM_Task, 'duration_secs': 0.137254} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.832523] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648074', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'name': 'volume-23f63f26-2383-4b45-8ba9-421f266b1b57', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '37d54d80-9cad-4cf4-9b8f-4790bbf4664d', 'attached_at': '', 'detached_at': '', 'volume_id': '23f63f26-2383-4b45-8ba9-421f266b1b57', 'serial': '23f63f26-2383-4b45-8ba9-421f266b1b57'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1566.371701] env[69994]: DEBUG nova.objects.instance [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'flavor' on Instance uuid 37d54d80-9cad-4cf4-9b8f-4790bbf4664d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1567.145827] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.379416] env[69994]: DEBUG oslo_concurrency.lockutils [None req-98f56ba8-2f38-4fc1-95b1-a24715ef0ca9 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.205s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1568.145886] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.146074] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
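
The detach above is an ordered sequence rather than a single call: look up the volume's backing VMDK, confirm it has not been displaced (so no consolidation is needed), reconfigure the instance VM to drop the virtual disk device, then run one more reconfigure task before reporting the VMDK detached. The outline below mirrors that order with stubbed helpers; only the step sequence is taken from the log, everything else is illustrative.

class _Backing:
    def __init__(self, device, displaced=False):
        self.device = device
        self.displaced = displaced

def find_vmdk_backing(vm, volume):
    # Stand-in: a real driver queries the VM's device list via the vSphere
    # API; here we return a fixed descriptor for disk 2001.
    return _Backing(device="disk-2001")

def reconfigure_vm(vm, remove_device):
    print("ReconfigVM_Task: removing", remove_device, "from", vm)

def finalize_detach(volume):
    # The log shows a second ReconfigVM_Task (task-3242999) before "Detached
    # VMDK"; its exact payload is not visible, so this is only a named placeholder.
    print("second ReconfigVM_Task for", volume)

def detach_vmdk_volume(vm, volume):
    backing = find_vmdk_backing(vm, volume)
    if backing.displaced:
        raise RuntimeError("volume was displaced; consolidation needed first")
    reconfigure_vm(vm, remove_device=backing.device)
    finalize_detach(volume)
    print("Detached VMDK", volume)

detach_vmdk_volume("instance-0000007c",
                   "volume-23f63f26-2383-4b45-8ba9-421f266b1b57")
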
{{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1568.405408] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1568.405852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1568.405852] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1568.406044] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1568.406223] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1568.408350] env[69994]: INFO nova.compute.manager [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Terminating instance [ 1568.912798] env[69994]: DEBUG nova.compute.manager [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1568.913039] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1568.914370] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab19d741-00da-4490-8c07-fc19604e4275 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.922021] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1568.922246] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c87cfc1-caeb-4274-bdc8-52b61bf9b2af {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.927615] env[69994]: DEBUG oslo_vmware.api [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1568.927615] env[69994]: value = "task-3243000" [ 1568.927615] env[69994]: _type = "Task" [ 1568.927615] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.934941] env[69994]: DEBUG oslo_vmware.api [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243000, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.437987] env[69994]: DEBUG oslo_vmware.api [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243000, 'name': PowerOffVM_Task, 'duration_secs': 0.143427} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.438381] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1569.438439] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1569.438652] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6ce93b0-878c-4e0b-8649-707e5b59e18a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.502495] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1569.502769] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1569.502898] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleting the datastore file [datastore1] 37d54d80-9cad-4cf4-9b8f-4790bbf4664d {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1569.503173] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22892372-e374-4660-8f57-90edc7a3fcb0 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.510199] env[69994]: DEBUG oslo_vmware.api [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1569.510199] env[69994]: value = "task-3243002" [ 1569.510199] env[69994]: _type = "Task" [ 1569.510199] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.517843] env[69994]: DEBUG oslo_vmware.api [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243002, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.019580] env[69994]: DEBUG oslo_vmware.api [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119786} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.019854] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1570.019999] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1570.020189] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1570.020363] env[69994]: INFO nova.compute.manager [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1570.020599] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1570.020784] env[69994]: DEBUG nova.compute.manager [-] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1570.020879] env[69994]: DEBUG nova.network.neutron [-] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1570.456680] env[69994]: DEBUG nova.compute.manager [req-8d7b117e-87e8-49a5-b435-9d3ae7ca130c req-381e7ceb-041f-4481-bfac-afcb8fcdf7f8 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Received event network-vif-deleted-7d02dd51-d5ca-4bf6-babe-300c991fc6bf {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1570.457119] env[69994]: INFO nova.compute.manager [req-8d7b117e-87e8-49a5-b435-9d3ae7ca130c req-381e7ceb-041f-4481-bfac-afcb8fcdf7f8 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Neutron deleted interface 7d02dd51-d5ca-4bf6-babe-300c991fc6bf; detaching it from the instance and deleting it from the info cache [ 1570.457119] env[69994]: DEBUG nova.network.neutron [req-8d7b117e-87e8-49a5-b435-9d3ae7ca130c req-381e7ceb-041f-4481-bfac-afcb8fcdf7f8 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.941716] env[69994]: DEBUG nova.network.neutron [-] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.959649] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0553306f-9c62-4272-b482-a2ea147c6700 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.969508] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72e54d5-a9bf-4bde-83d8-6f82c320adca {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.992930] env[69994]: DEBUG nova.compute.manager [req-8d7b117e-87e8-49a5-b435-9d3ae7ca130c req-381e7ceb-041f-4481-bfac-afcb8fcdf7f8 service nova] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Detach interface failed, port_id=7d02dd51-d5ca-4bf6-babe-300c991fc6bf, reason: Instance 37d54d80-9cad-4cf4-9b8f-4790bbf4664d could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1571.146055] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1571.445214] env[69994]: INFO nova.compute.manager [-] [instance: 37d54d80-9cad-4cf4-9b8f-4790bbf4664d] Took 1.42 seconds to deallocate network for instance. 
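
As with the first instance, Neutron's network-vif-deleted event races with the teardown: the port is already gone by the time the handler tries to detach it, so the handler simply drops the interface from the cached network_info, which then becomes the empty list logged above. Reduced to its essentials, and assuming a plain list-of-dicts cache for illustration, the cache update is just a filter:

def remove_vif_from_info_cache(network_info, port_id):
    # Keep every cached VIF except the one Neutron reported as deleted.
    return [vif for vif in network_info if vif.get("id") != port_id]

cache = [{"id": "7d02dd51-d5ca-4bf6-babe-300c991fc6bf",
          "address": "fa:16:3e:aa:bb:cc"}]   # MAC address is illustrative
print(remove_vif_from_info_cache(cache, "7d02dd51-d5ca-4bf6-babe-300c991fc6bf"))
# -> [] , matching "Updating instance_info_cache with network_info: []"
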
[ 1571.951970] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1571.952369] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1571.952520] env[69994]: DEBUG nova.objects.instance [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'resources' on Instance uuid 37d54d80-9cad-4cf4-9b8f-4790bbf4664d {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1572.488374] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99851fa-9b8b-4858-8412-c5c068b34d3c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.495974] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64e7e36-2a97-41f1-9379-92837120a66d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.525935] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae0b7d8-efc4-4609-86d1-564295734550 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.532349] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33bd973-ec04-4d25-bc83-a03b32d11701 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.544764] env[69994]: DEBUG nova.compute.provider_tree [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.047856] env[69994]: DEBUG nova.scheduler.client.report [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1573.145722] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] 
Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.553014] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1573.574010] env[69994]: INFO nova.scheduler.client.report [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted allocations for instance 37d54d80-9cad-4cf4-9b8f-4790bbf4664d [ 1574.082801] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eba62670-d794-4a8a-8f3c-b5d30234fd10 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "37d54d80-9cad-4cf4-9b8f-4790bbf4664d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.677s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1574.145754] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.145981] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.648989] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1574.649303] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1574.649477] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1574.649643] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1574.650559] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c26636-ae3b-4f93-b959-90323d4f4d80 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.659072] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55c98f3-1c71-4e8d-9bb2-5483067da028 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.672951] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d855bdb4-7ba9-4c43-95bd-a0d9235817c4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.679628] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4052a42f-3305-44eb-8c5e-2d39e06395b1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.709775] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180572MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1574.710045] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1574.710221] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.730578] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1575.730857] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1575.743306] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9d0afe-361c-48ed-ac44-587a7e121717 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.751042] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1830a7c-5042-400a-b585-58cb0f3dbb56 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.780312] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b4e752-b3f2-48ae-9eed-fb9abd740180 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.787223] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0f11a773-1b15-4ba2-a581-345c9d5b5fcd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.799656] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.278104] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1576.278342] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1576.302508] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1576.781086] env[69994]: DEBUG nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Starting instance... 
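
The inventory blob repeated above is easier to read once turned into capacity figures: for each resource class, placement's consumable capacity works out to (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may request. The numbers below are copied from the log line; the loop is just a worked example.

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 120},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: capacity=192, per-allocation cap=16
# MEMORY_MB: capacity=196078, per-allocation cap=65530
# DISK_GB: capacity=400, per-allocation cap=120
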
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1576.807156] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1576.807361] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.097s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1577.299193] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1577.299467] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1577.300951] env[69994]: INFO nova.compute.claims [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1577.803260] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.803583] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.333444] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db24788f-7e93-476b-9807-9c800ea72017 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.340911] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a8d444-96a6-4cdd-946f-0affc6020563 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.369668] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88aa479-2f38-460b-b446-40eca0fd7aaa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.376138] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26c67a5-6249-4e28-a681-5f7e09260682 {{(pid=69994) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.389533] env[69994]: DEBUG nova.compute.provider_tree [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.892889] env[69994]: DEBUG nova.scheduler.client.report [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1579.397791] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.098s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1579.398334] env[69994]: DEBUG nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1579.903663] env[69994]: DEBUG nova.compute.utils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1579.905097] env[69994]: DEBUG nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Allocating IP information in the background. 
{{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1579.905288] env[69994]: DEBUG nova.network.neutron [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1579.959372] env[69994]: DEBUG nova.policy [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4eba6562c3a41d0bfb7aeb393a600b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7bbd48e22345cc9c3f09a574143d7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1580.408238] env[69994]: DEBUG nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Start building block device mappings for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1580.434588] env[69994]: DEBUG nova.network.neutron [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Successfully created port: 6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1581.418210] env[69994]: DEBUG nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Start spawning the instance on the hypervisor. 
{{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1581.443662] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1581.443911] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1581.444081] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1581.444268] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1581.444423] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1581.444569] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1581.444772] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1581.444930] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1581.445145] env[69994]: DEBUG 
nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1581.445312] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1581.445486] env[69994]: DEBUG nova.virt.hardware [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1581.446363] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbf9cb1-15dd-48d6-80f9-3d8b03ee0b5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.454196] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d959bdb-0398-4b4c-8211-34904bf8afb9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.777014] env[69994]: DEBUG nova.compute.manager [req-8793ac6f-2ec6-4948-8425-b6c9e7e715c7 req-4d192693-f18d-4df7-840d-c4dfea2a0f63 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Received event network-vif-plugged-6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1581.777244] env[69994]: DEBUG oslo_concurrency.lockutils [req-8793ac6f-2ec6-4948-8425-b6c9e7e715c7 req-4d192693-f18d-4df7-840d-c4dfea2a0f63 service nova] Acquiring lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1581.777487] env[69994]: DEBUG oslo_concurrency.lockutils [req-8793ac6f-2ec6-4948-8425-b6c9e7e715c7 req-4d192693-f18d-4df7-840d-c4dfea2a0f63 service nova] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1581.777689] env[69994]: DEBUG oslo_concurrency.lockutils [req-8793ac6f-2ec6-4948-8425-b6c9e7e715c7 req-4d192693-f18d-4df7-840d-c4dfea2a0f63 service nova] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1581.777920] env[69994]: DEBUG nova.compute.manager [req-8793ac6f-2ec6-4948-8425-b6c9e7e715c7 req-4d192693-f18d-4df7-840d-c4dfea2a0f63 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] No waiting events found dispatching network-vif-plugged-6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1581.778028] env[69994]: WARNING nova.compute.manager 
[req-8793ac6f-2ec6-4948-8425-b6c9e7e715c7 req-4d192693-f18d-4df7-840d-c4dfea2a0f63 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Received unexpected event network-vif-plugged-6c6a495f-6dee-4078-9507-ccfb8d02caec for instance with vm_state building and task_state spawning. [ 1581.855078] env[69994]: DEBUG nova.network.neutron [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Successfully updated port: 6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1582.357374] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.357574] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1582.357644] env[69994]: DEBUG nova.network.neutron [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1582.889531] env[69994]: DEBUG nova.network.neutron [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1583.002820] env[69994]: DEBUG nova.network.neutron [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updating instance_info_cache with network_info: [{"id": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "address": "fa:16:3e:9a:a3:3f", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c6a495f-6d", "ovs_interfaceid": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.505883] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1583.506187] env[69994]: DEBUG nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Instance network_info: |[{"id": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "address": "fa:16:3e:9a:a3:3f", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c6a495f-6d", "ovs_interfaceid": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1583.506642] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:a3:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c6a495f-6dee-4078-9507-ccfb8d02caec', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1583.514103] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1583.514306] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1583.514525] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1814526-9011-4040-909c-77c316dda744 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.535551] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1583.535551] env[69994]: value = "task-3243003" [ 1583.535551] env[69994]: _type = "Task" [ 1583.535551] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.543921] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3243003, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.803587] env[69994]: DEBUG nova.compute.manager [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Received event network-changed-6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1583.803835] env[69994]: DEBUG nova.compute.manager [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Refreshing instance network info cache due to event network-changed-6c6a495f-6dee-4078-9507-ccfb8d02caec. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1583.804073] env[69994]: DEBUG oslo_concurrency.lockutils [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] Acquiring lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.804220] env[69994]: DEBUG oslo_concurrency.lockutils [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] Acquired lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1583.804400] env[69994]: DEBUG nova.network.neutron [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Refreshing network info cache for port 6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1584.045967] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3243003, 'name': CreateVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.477911] env[69994]: DEBUG nova.network.neutron [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updated VIF entry in instance network info cache for port 6c6a495f-6dee-4078-9507-ccfb8d02caec. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1584.478285] env[69994]: DEBUG nova.network.neutron [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updating instance_info_cache with network_info: [{"id": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "address": "fa:16:3e:9a:a3:3f", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c6a495f-6d", "ovs_interfaceid": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.546561] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3243003, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.981330] env[69994]: DEBUG oslo_concurrency.lockutils [req-d2cc8a6e-6dcc-4540-bb75-c43f57bf8a91 req-18cfc64d-062b-496b-8922-75257d0343bf service nova] Releasing lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1585.047435] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3243003, 'name': CreateVM_Task, 'duration_secs': 1.290422} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.047813] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1585.048269] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.048433] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1585.048760] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1585.049008] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a88f9d2a-ba1a-4af0-8b27-0105a7c27087 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.053093] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1585.053093] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524e45d2-b138-0a3d-25bb-fdf70b951ea6" [ 1585.053093] env[69994]: _type = "Task" [ 1585.053093] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.060579] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524e45d2-b138-0a3d-25bb-fdf70b951ea6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.564663] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524e45d2-b138-0a3d-25bb-fdf70b951ea6, 'name': SearchDatastore_Task, 'duration_secs': 0.011143} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.564995] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1585.565244] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1585.565478] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.565625] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1585.565804] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1585.566095] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc44889f-6573-4aca-8f67-a15c57bd96c2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.575238] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1585.575413] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1585.576089] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b8a396a-0aa9-4a6b-a754-edd5c4e2e791 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.580973] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1585.580973] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52781156-2076-3b05-b7b7-0af470d05c95" [ 1585.580973] env[69994]: _type = "Task" [ 1585.580973] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.588448] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52781156-2076-3b05-b7b7-0af470d05c95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.091226] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52781156-2076-3b05-b7b7-0af470d05c95, 'name': SearchDatastore_Task, 'duration_secs': 0.008293} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.091977] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54e8d653-2364-421b-90a6-f3c4397aff62 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.096608] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1586.096608] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]524d658e-16bc-e447-20b4-c89f1a1a0a8d" [ 1586.096608] env[69994]: _type = "Task" [ 1586.096608] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.105242] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524d658e-16bc-e447-20b4-c89f1a1a0a8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.606885] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]524d658e-16bc-e447-20b4-c89f1a1a0a8d, 'name': SearchDatastore_Task, 'duration_secs': 0.010339} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.607167] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1586.607401] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 8f5e688e-99cc-465e-9b06-7eb95213a9d7/8f5e688e-99cc-465e-9b06-7eb95213a9d7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1586.607645] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27745f42-7b2f-4795-b040-c5cfcdaf604f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.614270] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1586.614270] env[69994]: value = "task-3243004" [ 1586.614270] env[69994]: _type = "Task" [ 1586.614270] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.621344] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.124252] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243004, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.625040] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531183} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.625335] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore1] 8f5e688e-99cc-465e-9b06-7eb95213a9d7/8f5e688e-99cc-465e-9b06-7eb95213a9d7.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1587.625577] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1587.625857] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-358a03b2-e4ff-42be-a9ad-a7d47787dd8d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.632374] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1587.632374] env[69994]: value = "task-3243005" [ 1587.632374] env[69994]: _type = "Task" [ 1587.632374] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.641833] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243005, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.141909] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.357819} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.142512] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1588.142987] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81728212-779c-45c9-ba3e-776874164f17 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.164949] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 8f5e688e-99cc-465e-9b06-7eb95213a9d7/8f5e688e-99cc-465e-9b06-7eb95213a9d7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1588.165285] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b655b09-eb7d-4d96-bba3-bcceb2755dcd {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.184225] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1588.184225] env[69994]: value = "task-3243006" [ 1588.184225] env[69994]: _type = "Task" [ 1588.184225] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.192282] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243006, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.693951] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243006, 'name': ReconfigVM_Task, 'duration_secs': 0.400178} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.694248] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 8f5e688e-99cc-465e-9b06-7eb95213a9d7/8f5e688e-99cc-465e-9b06-7eb95213a9d7.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1588.695056] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d006a6e3-f35a-4fad-b069-88b14407ad2b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.700833] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1588.700833] env[69994]: value = "task-3243007" [ 1588.700833] env[69994]: _type = "Task" [ 1588.700833] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.708481] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243007, 'name': Rename_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.211485] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243007, 'name': Rename_Task, 'duration_secs': 0.168927} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.211904] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1589.211904] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-946ffd14-034c-4170-b39c-b5766e20513a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.218301] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1589.218301] env[69994]: value = "task-3243008" [ 1589.218301] env[69994]: _type = "Task" [ 1589.218301] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.225362] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243008, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.728019] env[69994]: DEBUG oslo_vmware.api [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243008, 'name': PowerOnVM_Task, 'duration_secs': 0.458558} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.728317] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1589.728531] env[69994]: INFO nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Took 8.31 seconds to spawn the instance on the hypervisor. [ 1589.728745] env[69994]: DEBUG nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1589.729529] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ec427c-39c2-4580-81c9-9a533d4034ff {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.249857] env[69994]: INFO nova.compute.manager [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Took 12.96 seconds to build instance. [ 1590.751624] env[69994]: DEBUG oslo_concurrency.lockutils [None req-f0818247-fb2d-4f4e-83ed-cdabc92c3bd0 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.473s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1591.371894] env[69994]: DEBUG nova.compute.manager [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Received event network-changed-6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1591.372175] env[69994]: DEBUG nova.compute.manager [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Refreshing instance network info cache due to event network-changed-6c6a495f-6dee-4078-9507-ccfb8d02caec. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1591.372312] env[69994]: DEBUG oslo_concurrency.lockutils [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] Acquiring lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.372408] env[69994]: DEBUG oslo_concurrency.lockutils [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] Acquired lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1591.372566] env[69994]: DEBUG nova.network.neutron [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Refreshing network info cache for port 6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1592.065740] env[69994]: DEBUG nova.network.neutron [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updated VIF entry in instance network info cache for port 6c6a495f-6dee-4078-9507-ccfb8d02caec. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1592.066101] env[69994]: DEBUG nova.network.neutron [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updating instance_info_cache with network_info: [{"id": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "address": "fa:16:3e:9a:a3:3f", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c6a495f-6d", "ovs_interfaceid": "6c6a495f-6dee-4078-9507-ccfb8d02caec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.568760] env[69994]: DEBUG oslo_concurrency.lockutils [req-2b01cf9a-0c2a-4f9a-a40b-880b948f72d1 req-eef0b249-28f5-4741-bf40-f4ba76cfd0f8 service nova] Releasing lock "refresh_cache-8f5e688e-99cc-465e-9b06-7eb95213a9d7" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1629.105224] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 
tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1629.105515] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1629.145727] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.145940] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.146097] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1629.608443] env[69994]: DEBUG nova.compute.utils [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1630.111534] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1631.142065] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.171212] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1631.171448] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1631.171668] env[69994]: INFO nova.compute.manager [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Attaching volume 5c87e14a-8b76-4a5c-b197-3cbf30defc75 to /dev/sdb [ 1631.203154] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2adf679-aed9-4f02-98c7-aa89a4a8e3a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.210271] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad208bb-16a7-4e18-aa62-8b2f7796ff54 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.223306] env[69994]: DEBUG nova.virt.block_device [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updating existing volume attachment record: 95e37fdd-f2e2-4c75-aef4-497712adcb24 {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1632.145920] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1634.145717] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.141598] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.145255] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.764463] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1635.764719] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648077', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'name': 'volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f5e688e-99cc-465e-9b06-7eb95213a9d7', 'attached_at': '', 'detached_at': '', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'serial': '5c87e14a-8b76-4a5c-b197-3cbf30defc75'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1635.765663] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765d14e2-f7a3-44b8-9b01-effec64b7280 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.781871] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0957129-639b-4af3-bd34-6a2af1855ace {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.805012] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75/volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.805229] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0295ba62-3d09-44ef-a28a-52292e778041 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.822552] env[69994]: DEBUG oslo_vmware.api [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1635.822552] env[69994]: value = "task-3243011" [ 1635.822552] env[69994]: _type = "Task" [ 1635.822552] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.829755] env[69994]: DEBUG oslo_vmware.api [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243011, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.145747] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.332280] env[69994]: DEBUG oslo_vmware.api [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243011, 'name': ReconfigVM_Task, 'duration_secs': 0.315862} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.332551] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Reconfigured VM instance instance-0000007e to attach disk [datastore2] volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75/volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1636.337090] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75f6a340-5928-4e34-9906-cc3d27d1ff6a {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.351224] env[69994]: DEBUG oslo_vmware.api [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1636.351224] env[69994]: value = "task-3243012" [ 1636.351224] env[69994]: _type = "Task" [ 1636.351224] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.358458] env[69994]: DEBUG oslo_vmware.api [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243012, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.649476] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1636.649730] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1636.649858] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1636.650034] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1636.650921] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3f674e-03ba-4782-9283-71fb32134052 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.659113] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b90d93c-ddb5-477c-8f48-e6c4e27c143d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.672723] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7216a61d-c22c-4c20-aec4-a6767e39d05b {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.678809] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735805c0-cb6d-4006-969d-220c8eae8781 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.708758] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180730MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1636.708954] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1636.709154] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1636.860936] env[69994]: DEBUG oslo_vmware.api [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243012, 'name': ReconfigVM_Task, 'duration_secs': 0.135008} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.861263] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648077', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'name': 'volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f5e688e-99cc-465e-9b06-7eb95213a9d7', 'attached_at': '', 'detached_at': '', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'serial': '5c87e14a-8b76-4a5c-b197-3cbf30defc75'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1637.733222] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance 8f5e688e-99cc-465e-9b06-7eb95213a9d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1637.733506] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1637.733668] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1637.758618] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07a30c2-4986-4e57-aed1-69136fc3cd40 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.766111] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4fd113-8a7c-4da2-a455-0666e04fd0e7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.796197] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c19a8ec-c8d5-4b56-a0ce-1ce0ed1761aa {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.803108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7590990-5da6-4cbf-b48d-dbe488cb2b44 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.815643] 
env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1637.896268] env[69994]: DEBUG nova.objects.instance [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'flavor' on Instance uuid 8f5e688e-99cc-465e-9b06-7eb95213a9d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1638.318866] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1638.403054] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb3be1b7-0faf-47e5-8fc2-67860ce11268 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.231s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1638.618967] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1638.619242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1638.823430] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1638.823676] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.114s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1639.122854] env[69994]: INFO nova.compute.manager [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 
8f5e688e-99cc-465e-9b06-7eb95213a9d7] Detaching volume 5c87e14a-8b76-4a5c-b197-3cbf30defc75 [ 1639.152343] env[69994]: INFO nova.virt.block_device [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Attempting to driver detach volume 5c87e14a-8b76-4a5c-b197-3cbf30defc75 from mountpoint /dev/sdb [ 1639.152577] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1639.152756] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648077', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'name': 'volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f5e688e-99cc-465e-9b06-7eb95213a9d7', 'attached_at': '', 'detached_at': '', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'serial': '5c87e14a-8b76-4a5c-b197-3cbf30defc75'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1639.153630] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762c90ee-2517-455f-aaa6-d03368dec448 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.175429] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e26a21f-c59c-42c7-bb93-da167c754161 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.181779] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ba8ae2-f975-4ac0-8574-2f7b5d207bb4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.201998] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac773353-0ce3-4505-863d-6d445675fa96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.215685] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] The volume has not been displaced from its original location: [datastore2] volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75/volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1639.220812] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Reconfiguring VM instance instance-0000007e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1639.221072] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ac85e44-c764-409f-8cc6-34e012d560a8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.238182] env[69994]: DEBUG oslo_vmware.api [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1639.238182] env[69994]: value = "task-3243013" [ 1639.238182] env[69994]: _type = "Task" [ 1639.238182] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.245275] env[69994]: DEBUG oslo_vmware.api [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243013, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.747484] env[69994]: DEBUG oslo_vmware.api [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243013, 'name': ReconfigVM_Task, 'duration_secs': 0.225752} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.747753] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Reconfigured VM instance instance-0000007e to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1639.752311] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-624cf667-039c-44ae-857a-21a5cb9b417c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.765998] env[69994]: DEBUG oslo_vmware.api [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1639.765998] env[69994]: value = "task-3243014" [ 1639.765998] env[69994]: _type = "Task" [ 1639.765998] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.773244] env[69994]: DEBUG oslo_vmware.api [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243014, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.823879] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1640.275732] env[69994]: DEBUG oslo_vmware.api [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243014, 'name': ReconfigVM_Task, 'duration_secs': 0.128912} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.276179] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648077', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'name': 'volume-5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f5e688e-99cc-465e-9b06-7eb95213a9d7', 'attached_at': '', 'detached_at': '', 'volume_id': '5c87e14a-8b76-4a5c-b197-3cbf30defc75', 'serial': '5c87e14a-8b76-4a5c-b197-3cbf30defc75'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1640.817352] env[69994]: DEBUG nova.objects.instance [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'flavor' on Instance uuid 8f5e688e-99cc-465e-9b06-7eb95213a9d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1641.824320] env[69994]: DEBUG oslo_concurrency.lockutils [None req-0e172e28-badd-4fd6-af84-cd4be80253b2 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.205s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1642.856296] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1642.856690] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1642.856762] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 
tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1642.856973] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1642.857174] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1642.859258] env[69994]: INFO nova.compute.manager [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Terminating instance [ 1643.363467] env[69994]: DEBUG nova.compute.manager [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Start destroying the instance on the hypervisor. 
{{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1643.363708] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1643.364619] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689f51cb-ecef-419f-b838-4a8ad785e58f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.373594] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1643.373862] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f74abad7-ee15-4a90-9a28-60215d10f5e3 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.379340] env[69994]: DEBUG oslo_vmware.api [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1643.379340] env[69994]: value = "task-3243015" [ 1643.379340] env[69994]: _type = "Task" [ 1643.379340] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.386561] env[69994]: DEBUG oslo_vmware.api [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.889129] env[69994]: DEBUG oslo_vmware.api [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243015, 'name': PowerOffVM_Task, 'duration_secs': 0.189603} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.889536] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1643.889536] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1643.889770] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbeb858f-017d-43b5-b79d-6b72237ac7f1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.708069] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1644.709315] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Deleting contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1644.709315] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleting the datastore file [datastore1] 8f5e688e-99cc-465e-9b06-7eb95213a9d7 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1644.709315] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68afa757-ca6a-443f-a169-89cf2a8bc3e8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.716353] env[69994]: DEBUG oslo_vmware.api [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1644.716353] env[69994]: value = "task-3243017" [ 1644.716353] env[69994]: _type = "Task" [ 1644.716353] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.724554] env[69994]: DEBUG oslo_vmware.api [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243017, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.226082] env[69994]: DEBUG oslo_vmware.api [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140872} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.226491] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1645.226539] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Deleted contents of the VM from datastore datastore1 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1645.226683] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1645.226861] env[69994]: INFO nova.compute.manager [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Took 1.86 seconds to destroy the instance on the hypervisor. [ 1645.227167] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1645.227363] env[69994]: DEBUG nova.compute.manager [-] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1645.227460] env[69994]: DEBUG nova.network.neutron [-] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1645.629115] env[69994]: DEBUG nova.compute.manager [req-d8a163aa-5a63-47e5-bc0d-c1fb045541d8 req-ba2f0586-f9a6-4fab-a4fe-86c4eace545c service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Received event network-vif-deleted-6c6a495f-6dee-4078-9507-ccfb8d02caec {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1645.629395] env[69994]: INFO nova.compute.manager [req-d8a163aa-5a63-47e5-bc0d-c1fb045541d8 req-ba2f0586-f9a6-4fab-a4fe-86c4eace545c service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Neutron deleted interface 6c6a495f-6dee-4078-9507-ccfb8d02caec; detaching it from the instance and deleting it from the info cache [ 1645.629512] env[69994]: DEBUG nova.network.neutron [req-d8a163aa-5a63-47e5-bc0d-c1fb045541d8 req-ba2f0586-f9a6-4fab-a4fe-86c4eace545c service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.109724] env[69994]: DEBUG nova.network.neutron [-] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.132189] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3108eac9-5b3e-45bb-82ec-82e2a150128c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.141108] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8d8c23-a5bc-4b49-8278-0725dee87122 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.165144] env[69994]: DEBUG nova.compute.manager [req-d8a163aa-5a63-47e5-bc0d-c1fb045541d8 req-ba2f0586-f9a6-4fab-a4fe-86c4eace545c service nova] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Detach interface failed, port_id=6c6a495f-6dee-4078-9507-ccfb8d02caec, reason: Instance 8f5e688e-99cc-465e-9b06-7eb95213a9d7 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1646.612183] env[69994]: INFO nova.compute.manager [-] [instance: 8f5e688e-99cc-465e-9b06-7eb95213a9d7] Took 1.38 seconds to deallocate network for instance. 
[ 1647.118598] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1647.118874] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1647.119051] env[69994]: DEBUG nova.objects.instance [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'resources' on Instance uuid 8f5e688e-99cc-465e-9b06-7eb95213a9d7 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1647.655900] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3fa506-6b2c-4283-91f4-bb8b304ed4e6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.665318] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8940b616-6a4b-453a-8bcd-41313da65dde {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.695875] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0afa84f-38b4-4587-aefc-0dedac957786 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.703580] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79193461-9f01-49a9-89a7-a065876e3519 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.716731] env[69994]: DEBUG nova.compute.provider_tree [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.220359] env[69994]: DEBUG nova.scheduler.client.report [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1648.725298] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 
tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.606s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1648.744803] env[69994]: INFO nova.scheduler.client.report [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted allocations for instance 8f5e688e-99cc-465e-9b06-7eb95213a9d7 [ 1649.252867] env[69994]: DEBUG oslo_concurrency.lockutils [None req-b446384b-076f-4eee-bbd2-ce74d28cfa90 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "8f5e688e-99cc-465e-9b06-7eb95213a9d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.396s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1650.842926] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "bd20712a-a899-4a34-9632-0bf73451c642" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1650.843213] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1651.344986] env[69994]: DEBUG nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Starting instance... 
{{(pid=69994) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1651.864220] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1651.864577] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1651.866416] env[69994]: INFO nova.compute.claims [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1652.900966] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38309c69-bf37-4510-bf49-f7b6b8b1dfe9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.908399] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46c0a7e-eae0-4012-b882-def33da39e4e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.937313] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cb72b0-f59f-47ef-8e9a-23db6e41fc4c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.944353] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ad085e-1c7b-43e5-bfb1-a138bbbe6d9d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.957357] env[69994]: DEBUG nova.compute.provider_tree [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1653.460338] env[69994]: DEBUG nova.scheduler.client.report [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1653.966892] env[69994]: DEBUG oslo_concurrency.lockutils 
[None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.102s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1653.967604] env[69994]: DEBUG nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Start building networks asynchronously for instance. {{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1654.472928] env[69994]: DEBUG nova.compute.utils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1654.474341] env[69994]: DEBUG nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Allocating IP information in the background. {{(pid=69994) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1654.474512] env[69994]: DEBUG nova.network.neutron [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] allocate_for_instance() {{(pid=69994) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1654.527492] env[69994]: DEBUG nova.policy [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4eba6562c3a41d0bfb7aeb393a600b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a7bbd48e22345cc9c3f09a574143d7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69994) authorize /opt/stack/nova/nova/policy.py:192}} [ 1654.782291] env[69994]: DEBUG nova.network.neutron [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Successfully created port: 9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1654.977734] env[69994]: DEBUG nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Start building block device mappings for instance. 
{{(pid=69994) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1655.987724] env[69994]: DEBUG nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Start spawning the instance on the hypervisor. {{(pid=69994) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1656.013821] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T08:37:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T08:36:49Z,direct_url=,disk_format='vmdk',id=cc2e14cc-b12f-480a-a387-dd21e9efda8b,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1110add503f24d308ace30fa5efaa3e8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T08:36:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1656.014083] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1656.014243] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image limits 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1656.014421] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Flavor pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1656.014563] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Image pref 0:0:0 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1656.014706] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69994) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1656.014912] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69994) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1656.015164] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1656.015359] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Got 1 possible topologies {{(pid=69994) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1656.015522] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1656.015695] env[69994]: DEBUG nova.virt.hardware [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69994) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1656.016680] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9879d7c-36d5-4fe9-bd67-0c68edeaf169 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.024712] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d78acd-b769-4bff-aeb7-c6b81b3929b6 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.124944] env[69994]: DEBUG nova.compute.manager [req-80203359-665d-4a68-bc57-33d56dcb8e0b req-d68b493c-705a-4c3d-914f-50425818b975 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Received event network-vif-plugged-9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1656.125237] env[69994]: DEBUG oslo_concurrency.lockutils [req-80203359-665d-4a68-bc57-33d56dcb8e0b req-d68b493c-705a-4c3d-914f-50425818b975 service nova] Acquiring lock "bd20712a-a899-4a34-9632-0bf73451c642-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1656.125497] env[69994]: DEBUG oslo_concurrency.lockutils [req-80203359-665d-4a68-bc57-33d56dcb8e0b req-d68b493c-705a-4c3d-914f-50425818b975 service nova] Lock "bd20712a-a899-4a34-9632-0bf73451c642-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1656.125675] env[69994]: DEBUG oslo_concurrency.lockutils [req-80203359-665d-4a68-bc57-33d56dcb8e0b req-d68b493c-705a-4c3d-914f-50425818b975 service nova] Lock "bd20712a-a899-4a34-9632-0bf73451c642-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1656.125870] env[69994]: 
DEBUG nova.compute.manager [req-80203359-665d-4a68-bc57-33d56dcb8e0b req-d68b493c-705a-4c3d-914f-50425818b975 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] No waiting events found dispatching network-vif-plugged-9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1656.126068] env[69994]: WARNING nova.compute.manager [req-80203359-665d-4a68-bc57-33d56dcb8e0b req-d68b493c-705a-4c3d-914f-50425818b975 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Received unexpected event network-vif-plugged-9616eeaa-e068-4ed3-bc81-5eda79dfc54c for instance with vm_state building and task_state spawning. [ 1656.206274] env[69994]: DEBUG nova.network.neutron [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Successfully updated port: 9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1656.709026] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.709203] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1656.709302] env[69994]: DEBUG nova.network.neutron [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Building network info cache for instance {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1657.238785] env[69994]: DEBUG nova.network.neutron [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Instance cache missing network info. 
{{(pid=69994) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1657.351462] env[69994]: DEBUG nova.network.neutron [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updating instance_info_cache with network_info: [{"id": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "address": "fa:16:3e:9c:54:49", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9616eeaa-e0", "ovs_interfaceid": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.854279] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1657.854645] env[69994]: DEBUG nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Instance network_info: |[{"id": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "address": "fa:16:3e:9c:54:49", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9616eeaa-e0", "ovs_interfaceid": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69994) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1657.855115] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:54:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9616eeaa-e068-4ed3-bc81-5eda79dfc54c', 'vif_model': 'vmxnet3'}] {{(pid=69994) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1657.862660] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1657.862845] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Creating VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1657.863076] env[69994]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1894d181-9b92-4c43-b667-20b79d5386ee {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.882868] env[69994]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1657.882868] env[69994]: value = "task-3243018" [ 1657.882868] env[69994]: _type = "Task" [ 1657.882868] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.891032] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3243018, 'name': CreateVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.150986] env[69994]: DEBUG nova.compute.manager [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Received event network-changed-9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1658.151202] env[69994]: DEBUG nova.compute.manager [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Refreshing instance network info cache due to event network-changed-9616eeaa-e068-4ed3-bc81-5eda79dfc54c. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1658.151416] env[69994]: DEBUG oslo_concurrency.lockutils [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] Acquiring lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.151556] env[69994]: DEBUG oslo_concurrency.lockutils [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] Acquired lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1658.151714] env[69994]: DEBUG nova.network.neutron [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Refreshing network info cache for port 9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1658.391990] env[69994]: DEBUG oslo_vmware.api [-] Task: {'id': task-3243018, 'name': CreateVM_Task, 'duration_secs': 0.307827} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.392409] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Created VM on the ESX host {{(pid=69994) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1658.392787] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.392951] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1658.393314] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1658.393559] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-639a581c-6afa-49f5-b319-000b0d389103 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.397458] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1658.397458] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52d459e7-3651-2b13-e4c4-ee42c0cd790d" [ 1658.397458] env[69994]: _type = "Task" [ 1658.397458] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.404345] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d459e7-3651-2b13-e4c4-ee42c0cd790d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.825511] env[69994]: DEBUG nova.network.neutron [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updated VIF entry in instance network info cache for port 9616eeaa-e068-4ed3-bc81-5eda79dfc54c. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1658.825850] env[69994]: DEBUG nova.network.neutron [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updating instance_info_cache with network_info: [{"id": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "address": "fa:16:3e:9c:54:49", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9616eeaa-e0", "ovs_interfaceid": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.907520] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52d459e7-3651-2b13-e4c4-ee42c0cd790d, 'name': SearchDatastore_Task, 'duration_secs': 0.009763} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.907811] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1658.908050] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Processing image cc2e14cc-b12f-480a-a387-dd21e9efda8b {{(pid=69994) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1658.908305] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.908457] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1658.908634] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1658.908879] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b97c6fa-c7cb-4763-b209-e4a40b1d99f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.916718] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69994) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1658.916883] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69994) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1658.917566] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8f25237-33ab-49f8-b69f-c03f0fd3a57d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.922367] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1658.922367] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52c3a19d-84ec-aa66-36c0-0f823dddec8f" [ 1658.922367] env[69994]: _type = "Task" [ 1658.922367] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.929774] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c3a19d-84ec-aa66-36c0-0f823dddec8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.328370] env[69994]: DEBUG oslo_concurrency.lockutils [req-45034de2-162b-403b-aa39-2727c2f1823e req-c035a9f6-bc9a-4171-9c82-fa7a752ffe2a service nova] Releasing lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1659.432735] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52c3a19d-84ec-aa66-36c0-0f823dddec8f, 'name': SearchDatastore_Task, 'duration_secs': 0.007963} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.433490] env[69994]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28fd2ead-b45e-4a00-9ffa-112262ef8c96 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.438949] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1659.438949] env[69994]: value = "session[520ca315-cd17-8670-37df-715bbcc23663]52ff22f5-8f80-450c-a014-1af2e70d13d1" [ 1659.438949] env[69994]: _type = "Task" [ 1659.438949] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.446144] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ff22f5-8f80-450c-a014-1af2e70d13d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.948982] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': session[520ca315-cd17-8670-37df-715bbcc23663]52ff22f5-8f80-450c-a014-1af2e70d13d1, 'name': SearchDatastore_Task, 'duration_secs': 0.008485} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.949272] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1659.949523] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] bd20712a-a899-4a34-9632-0bf73451c642/bd20712a-a899-4a34-9632-0bf73451c642.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1659.949773] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-045602c7-807e-486d-b117-f40b3ce2bb5f {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.956450] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1659.956450] env[69994]: value = "task-3243019" [ 1659.956450] env[69994]: _type = "Task" [ 1659.956450] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.964111] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.465769] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.429601} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.466191] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc2e14cc-b12f-480a-a387-dd21e9efda8b/cc2e14cc-b12f-480a-a387-dd21e9efda8b.vmdk to [datastore2] bd20712a-a899-4a34-9632-0bf73451c642/bd20712a-a899-4a34-9632-0bf73451c642.vmdk {{(pid=69994) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1660.466280] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Extending root virtual disk to 1048576 {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1660.466478] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-461548b9-17b8-4024-8d1a-4ecd80d0d762 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.472051] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1660.472051] env[69994]: value = "task-3243020" [ 1660.472051] env[69994]: _type = "Task" [ 1660.472051] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.479591] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243020, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.982554] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058805} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.982797] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Extended root virtual disk {{(pid=69994) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1660.983548] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0884fde4-6a35-4970-b634-09a0e5fb84b7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.004420] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] bd20712a-a899-4a34-9632-0bf73451c642/bd20712a-a899-4a34-9632-0bf73451c642.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1661.004672] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-037bd2f5-1864-4260-9522-e20b38e866eb {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.023180] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1661.023180] env[69994]: value = "task-3243021" [ 1661.023180] env[69994]: _type = "Task" [ 1661.023180] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.030377] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243021, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.532701] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243021, 'name': ReconfigVM_Task, 'duration_secs': 0.288111} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.533162] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Reconfigured VM instance instance-0000007f to attach disk [datastore2] bd20712a-a899-4a34-9632-0bf73451c642/bd20712a-a899-4a34-9632-0bf73451c642.vmdk or device None with type sparse {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1661.533604] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e436c301-011f-462a-949e-b545ec0ebbf8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.540749] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1661.540749] env[69994]: value = "task-3243022" [ 1661.540749] env[69994]: _type = "Task" [ 1661.540749] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.548428] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243022, 'name': Rename_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.050890] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243022, 'name': Rename_Task, 'duration_secs': 0.157436} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.051165] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Powering on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1662.051423] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d718b295-3e46-47a8-8d5d-073bf501df61 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.057159] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1662.057159] env[69994]: value = "task-3243023" [ 1662.057159] env[69994]: _type = "Task" [ 1662.057159] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.063834] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243023, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.567083] env[69994]: DEBUG oslo_vmware.api [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243023, 'name': PowerOnVM_Task, 'duration_secs': 0.496961} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.567521] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Powered on the VM {{(pid=69994) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1662.567568] env[69994]: INFO nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Took 6.58 seconds to spawn the instance on the hypervisor. [ 1662.567722] env[69994]: DEBUG nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Checking state {{(pid=69994) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1662.568494] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0bbb08-5054-4d3a-b974-74cc5635f510 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.087691] env[69994]: INFO nova.compute.manager [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Took 11.24 seconds to build instance. [ 1663.589483] env[69994]: DEBUG oslo_concurrency.lockutils [None req-eb26ebb5-d806-4ff0-9c1f-de8f1f0e8316 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.746s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1663.742901] env[69994]: DEBUG nova.compute.manager [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Received event network-changed-9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1663.743187] env[69994]: DEBUG nova.compute.manager [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Refreshing instance network info cache due to event network-changed-9616eeaa-e068-4ed3-bc81-5eda79dfc54c. 
{{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1663.743350] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] Acquiring lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.743520] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] Acquired lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1663.743699] env[69994]: DEBUG nova.network.neutron [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Refreshing network info cache for port 9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1664.437868] env[69994]: DEBUG nova.network.neutron [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updated VIF entry in instance network info cache for port 9616eeaa-e068-4ed3-bc81-5eda79dfc54c. {{(pid=69994) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1664.438262] env[69994]: DEBUG nova.network.neutron [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updating instance_info_cache with network_info: [{"id": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "address": "fa:16:3e:9c:54:49", "network": {"id": "f981c93f-f03a-4cdd-ac65-6c8511d8093a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2111076881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a7bbd48e22345cc9c3f09a574143d7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9616eeaa-e0", "ovs_interfaceid": "9616eeaa-e068-4ed3-bc81-5eda79dfc54c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.940741] env[69994]: DEBUG oslo_concurrency.lockutils [req-a8b7e033-38fb-41a1-882f-1412e5825e1a req-2b002d1d-d94d-4012-9d1f-b8c811a3a115 service nova] Releasing lock "refresh_cache-bd20712a-a899-4a34-9632-0bf73451c642" {{(pid=69994) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1691.145574] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic 
task ComputeManager._poll_rescued_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1691.145921] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1691.146048] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69994) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1692.146160] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.146539] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1696.142263] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1696.145829] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1696.146042] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1696.649868] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1696.650201] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1696.650365] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1696.650520] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69994) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1696.651440] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bd9ecb-f74d-4730-8c4a-d2f2800dbe28 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.659583] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6766e5-486f-4f29-9a8c-76a8112b5ff5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.673135] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4184ab3b-06c1-492f-bd4b-ddc693678cf1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.679034] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11972176-a109-4792-8529-44f7d5a52baf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.707644] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180801MB free_disk=120GB free_vcpus=48 pci_devices=None {{(pid=69994) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1696.707793] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1696.707999] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1697.731138] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Instance bd20712a-a899-4a34-9632-0bf73451c642 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69994) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1697.731377] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1697.731418] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69994) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1697.747188] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Refreshing inventories for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1697.758588] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Updating ProviderTree inventory for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1697.758780] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Updating inventory in ProviderTree for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1697.768313] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Refreshing aggregate associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, aggregates: None {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1697.784588] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Refreshing trait associations for resource provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69994) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1697.806336] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878c8d71-807f-4c86-b02a-9b7be284d1b5 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.813443] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fcfee395-8f68-4cfd-9535-dc4f6a29e2e1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.843246] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0472d2aa-651f-48db-a3ea-c9a908e0e7f2 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.849810] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4341e2ce-ac6e-40cb-84d0-c0edcaf4b77d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.862682] env[69994]: DEBUG nova.compute.provider_tree [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1698.365998] env[69994]: DEBUG nova.scheduler.client.report [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1698.870869] env[69994]: DEBUG nova.compute.resource_tracker [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69994) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1698.871238] env[69994]: DEBUG oslo_concurrency.lockutils [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.163s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1699.871877] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.625977] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "bd20712a-a899-4a34-9632-0bf73451c642" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1701.626242] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1702.129805] 
env[69994]: DEBUG nova.compute.utils [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Using /dev/sd instead of None {{(pid=69994) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1702.632551] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1703.689546] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "bd20712a-a899-4a34-9632-0bf73451c642" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1703.689895] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1703.690121] env[69994]: INFO nova.compute.manager [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Attaching volume d390ac6f-2034-4ade-82be-0dcc787f51ea to /dev/sdb [ 1703.718843] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8badf8aa-f647-4daf-8458-ab4fef0be57c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.725706] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d4d8a6-e4f1-4e3d-afce-274b3ec166ba {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.738699] env[69994]: DEBUG nova.virt.block_device [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updating existing volume attachment record: f87d52b4-5456-4b07-a051-9dc24f2f4c4e {{(pid=69994) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1708.282181] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Volume attach. 
Driver type: vmdk {{(pid=69994) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1708.282437] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648079', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'name': 'volume-d390ac6f-2034-4ade-82be-0dcc787f51ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bd20712a-a899-4a34-9632-0bf73451c642', 'attached_at': '', 'detached_at': '', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'serial': 'd390ac6f-2034-4ade-82be-0dcc787f51ea'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1708.283304] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7bcf81-cd76-4f12-87c8-1ed6eaeec3e9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.299229] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f106986-fd50-4678-a2e5-adf963380e55 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.322693] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] volume-d390ac6f-2034-4ade-82be-0dcc787f51ea/volume-d390ac6f-2034-4ade-82be-0dcc787f51ea.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1708.322975] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f9ba2a3-fd86-4f96-a685-80d4ba070dea {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.340767] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1708.340767] env[69994]: value = "task-3243026" [ 1708.340767] env[69994]: _type = "Task" [ 1708.340767] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.348429] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.850673] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243026, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.351849] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243026, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.852236] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243026, 'name': ReconfigVM_Task, 'duration_secs': 1.432035} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.852500] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Reconfigured VM instance instance-0000007f to attach disk [datastore1] volume-d390ac6f-2034-4ade-82be-0dcc787f51ea/volume-d390ac6f-2034-4ade-82be-0dcc787f51ea.vmdk or device None with type thin {{(pid=69994) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1709.857131] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98c5bd7f-162f-4fb5-99d1-86a0f23344c9 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.871131] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1709.871131] env[69994]: value = "task-3243027" [ 1709.871131] env[69994]: _type = "Task" [ 1709.871131] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.878579] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243027, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.380936] env[69994]: DEBUG oslo_vmware.api [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243027, 'name': ReconfigVM_Task, 'duration_secs': 0.151259} completed successfully. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.381256] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648079', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'name': 'volume-d390ac6f-2034-4ade-82be-0dcc787f51ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bd20712a-a899-4a34-9632-0bf73451c642', 'attached_at': '', 'detached_at': '', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'serial': 'd390ac6f-2034-4ade-82be-0dcc787f51ea'} {{(pid=69994) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1711.416332] env[69994]: DEBUG nova.objects.instance [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'flavor' on Instance uuid bd20712a-a899-4a34-9632-0bf73451c642 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1711.922354] env[69994]: DEBUG oslo_concurrency.lockutils [None req-c5d136de-4381-4eb6-8842-600c95e72cf1 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.232s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1712.124264] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "bd20712a-a899-4a34-9632-0bf73451c642" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1712.124525] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1712.627390] env[69994]: INFO nova.compute.manager [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Detaching volume d390ac6f-2034-4ade-82be-0dcc787f51ea [ 1712.656900] env[69994]: INFO nova.virt.block_device [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Attempting to driver detach volume d390ac6f-2034-4ade-82be-0dcc787f51ea from mountpoint /dev/sdb [ 1712.657148] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 
tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Volume detach. Driver type: vmdk {{(pid=69994) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1712.657330] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648079', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'name': 'volume-d390ac6f-2034-4ade-82be-0dcc787f51ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bd20712a-a899-4a34-9632-0bf73451c642', 'attached_at': '', 'detached_at': '', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'serial': 'd390ac6f-2034-4ade-82be-0dcc787f51ea'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1712.658230] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1479fe7-8361-433e-8f78-52338e696e3d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.679491] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7903d43f-0688-4836-b7f5-07f292cb62c7 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.685984] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db91c5a1-e7f8-458c-8017-dded81f5ca97 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.705193] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84ff987-220d-4cef-bbf1-ab910832df98 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.718921] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] The volume has not been displaced from its original location: [datastore1] volume-d390ac6f-2034-4ade-82be-0dcc787f51ea/volume-d390ac6f-2034-4ade-82be-0dcc787f51ea.vmdk. No consolidation needed. 
{{(pid=69994) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1712.723958] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Reconfiguring VM instance instance-0000007f to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1712.724211] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac4929c4-5fe9-469e-833a-ebf69d3e551d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.740717] env[69994]: DEBUG oslo_vmware.api [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1712.740717] env[69994]: value = "task-3243028" [ 1712.740717] env[69994]: _type = "Task" [ 1712.740717] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.748879] env[69994]: DEBUG oslo_vmware.api [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243028, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.250153] env[69994]: DEBUG oslo_vmware.api [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243028, 'name': ReconfigVM_Task, 'duration_secs': 0.212477} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.250381] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Reconfigured VM instance instance-0000007f to detach disk 2001 {{(pid=69994) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1713.254836] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a32fdf4d-c953-4ca0-95d0-baca506b24c1 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.269280] env[69994]: DEBUG oslo_vmware.api [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1713.269280] env[69994]: value = "task-3243029" [ 1713.269280] env[69994]: _type = "Task" [ 1713.269280] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.276541] env[69994]: DEBUG oslo_vmware.api [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243029, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.778520] env[69994]: DEBUG oslo_vmware.api [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243029, 'name': ReconfigVM_Task, 'duration_secs': 0.135796} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.778816] env[69994]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-648079', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'name': 'volume-d390ac6f-2034-4ade-82be-0dcc787f51ea', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bd20712a-a899-4a34-9632-0bf73451c642', 'attached_at': '', 'detached_at': '', 'volume_id': 'd390ac6f-2034-4ade-82be-0dcc787f51ea', 'serial': 'd390ac6f-2034-4ade-82be-0dcc787f51ea'} {{(pid=69994) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1714.319324] env[69994]: DEBUG nova.objects.instance [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'flavor' on Instance uuid bd20712a-a899-4a34-9632-0bf73451c642 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1715.328070] env[69994]: DEBUG oslo_concurrency.lockutils [None req-1bdd2c82-ae6e-4c44-8a10-aa6a87829ef7 tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.203s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1716.357050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "bd20712a-a899-4a34-9632-0bf73451c642" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1716.357050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1716.357050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "bd20712a-a899-4a34-9632-0bf73451c642-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69994) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1716.357050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1716.357050] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1716.359017] env[69994]: INFO nova.compute.manager [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Terminating instance [ 1716.863519] env[69994]: DEBUG nova.compute.manager [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Start destroying the instance on the hypervisor. {{(pid=69994) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1716.863736] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Destroying instance {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1716.865010] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00306bad-8593-498c-bd1b-932300b732cf {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.872610] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Powering off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1716.872827] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-844c6f86-55af-42c0-b672-3e2e99edef7c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.877953] env[69994]: DEBUG oslo_vmware.api [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1716.877953] env[69994]: value = "task-3243030" [ 1716.877953] env[69994]: _type = "Task" [ 1716.877953] env[69994]: } to complete. 
{{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.885460] env[69994]: DEBUG oslo_vmware.api [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243030, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.387362] env[69994]: DEBUG oslo_vmware.api [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243030, 'name': PowerOffVM_Task, 'duration_secs': 0.162564} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.387764] env[69994]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Powered off the VM {{(pid=69994) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1717.387807] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Unregistering the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1717.388053] env[69994]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bbe04c1-cedc-4c85-a4b5-a03dcb0fab07 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.450194] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Unregistered the VM {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1717.450434] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Deleting contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1717.450599] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleting the datastore file [datastore2] bd20712a-a899-4a34-9632-0bf73451c642 {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1717.450861] env[69994]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a618157b-b326-4b07-baab-27ab75819f9e {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.457516] env[69994]: DEBUG oslo_vmware.api [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for the task: (returnval){ [ 1717.457516] env[69994]: value = "task-3243032" [ 1717.457516] 
env[69994]: _type = "Task" [ 1717.457516] env[69994]: } to complete. {{(pid=69994) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.464818] env[69994]: DEBUG oslo_vmware.api [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243032, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.966600] env[69994]: DEBUG oslo_vmware.api [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Task: {'id': task-3243032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129962} completed successfully. {{(pid=69994) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.966848] env[69994]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted the datastore file {{(pid=69994) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1717.967034] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Deleted contents of the VM from datastore datastore2 {{(pid=69994) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1717.967223] env[69994]: DEBUG nova.virt.vmwareapi.vmops [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Instance destroyed {{(pid=69994) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1717.967395] env[69994]: INFO nova.compute.manager [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1717.967682] env[69994]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69994) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1717.967889] env[69994]: DEBUG nova.compute.manager [-] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Deallocating network for instance {{(pid=69994) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1717.968176] env[69994]: DEBUG nova.network.neutron [-] [instance: bd20712a-a899-4a34-9632-0bf73451c642] deallocate_for_instance() {{(pid=69994) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1718.414690] env[69994]: DEBUG nova.compute.manager [req-5f2931a0-0ada-43c8-9e45-1b65a4b72b8b req-df0aeedf-bc25-4d3f-8a11-895578f46dd6 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Received event network-vif-deleted-9616eeaa-e068-4ed3-bc81-5eda79dfc54c {{(pid=69994) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1718.414929] env[69994]: INFO nova.compute.manager [req-5f2931a0-0ada-43c8-9e45-1b65a4b72b8b req-df0aeedf-bc25-4d3f-8a11-895578f46dd6 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Neutron deleted interface 9616eeaa-e068-4ed3-bc81-5eda79dfc54c; detaching it from the instance and deleting it from the info cache [ 1718.415054] env[69994]: DEBUG nova.network.neutron [req-5f2931a0-0ada-43c8-9e45-1b65a4b72b8b req-df0aeedf-bc25-4d3f-8a11-895578f46dd6 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.895548] env[69994]: DEBUG nova.network.neutron [-] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Updating instance_info_cache with network_info: [] {{(pid=69994) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.917238] env[69994]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-456c635e-e40e-4167-bd8f-cecad3ae6ab8 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.926557] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c1f41d-b4a7-4865-baf6-512c675ae10d {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.950372] env[69994]: DEBUG nova.compute.manager [req-5f2931a0-0ada-43c8-9e45-1b65a4b72b8b req-df0aeedf-bc25-4d3f-8a11-895578f46dd6 service nova] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Detach interface failed, port_id=9616eeaa-e068-4ed3-bc81-5eda79dfc54c, reason: Instance bd20712a-a899-4a34-9632-0bf73451c642 could not be found. {{(pid=69994) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1719.399071] env[69994]: INFO nova.compute.manager [-] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Took 1.43 seconds to deallocate network for instance. 
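[editor's note] The attach, detach, and destroy records above are all driven by the same oslo.vmware task-polling pattern: "Waiting for the task ... to complete", repeated "progress is N%." polls, then "completed successfully" with a duration_secs. The following is a minimal sketch of that loop, not the real oslo.vmware implementation; get_task_info() is a hypothetical stand-in for a PropertyCollector read of the Task managed object's "info" property.

```python
import time


def wait_for_task(task_id, get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info(task_id) is assumed to return a dict such as
    {'state': 'running', 'progress': 5} or {'state': 'success'}.
    """
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            duration = time.monotonic() - started
            # Mirrors the "completed successfully" record with duration_secs.
            print(f"Task: {task_id} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Mirrors the periodic "progress is N%." poll records.
        print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)
```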
[ 1719.906474] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1719.906817] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1719.907081] env[69994]: DEBUG nova.objects.instance [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lazy-loading 'resources' on Instance uuid bd20712a-a899-4a34-9632-0bf73451c642 {{(pid=69994) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1720.440636] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253a28b0-7426-4b24-a21f-9502d442a0a4 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.448089] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650c2805-3c77-479f-ba2a-fbfda355a58c {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.477989] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef6be6b-ccb1-41d2-a9fe-46effd7c10ce {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.484550] env[69994]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5befc100-7f7d-4430-9919-4611fd271003 {{(pid=69994) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.497107] env[69994]: DEBUG nova.compute.provider_tree [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed in ProviderTree for provider: 92ce3c95-4efe-4d04-802b-6b187afc5aa7 {{(pid=69994) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.000757] env[69994]: DEBUG nova.scheduler.client.report [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Inventory has not changed for provider 92ce3c95-4efe-4d04-802b-6b187afc5aa7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 120, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69994) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1721.505765] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f 
tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.599s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1721.525098] env[69994]: INFO nova.scheduler.client.report [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Deleted allocations for instance bd20712a-a899-4a34-9632-0bf73451c642 [ 1722.034415] env[69994]: DEBUG oslo_concurrency.lockutils [None req-ff42143a-292b-4608-b56d-1b7549cbbe0f tempest-AttachVolumeNegativeTest-699338048 tempest-AttachVolumeNegativeTest-699338048-project-member] Lock "bd20712a-a899-4a34-9632-0bf73451c642" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.678s {{(pid=69994) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1751.141594] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.145892] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.146183] env[69994]: DEBUG oslo_service.periodic_task [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69994) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.146314] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] Cleaning up deleted instances {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1752.650627] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] There are 4 instances to clean {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1752.650818] env[69994]: DEBUG nova.compute.manager [None req-21153c73-1b63-48b1-b430-b2dbbd6afc98 None None] [instance: bd20712a-a899-4a34-9632-0bf73451c642] Instance has had 0 of 5 cleanup attempts {{(pid=69994) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}}
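[editor's note] The lockutils records throughout this trace follow a fixed acquire/release bookkeeping pattern: "Acquiring lock ... by ...", "acquired ... :: waited Xs", then "released ... :: held Ys". A minimal sketch of that pattern follows, assuming nothing about oslo.concurrency internals; timed_lock() is a hypothetical helper, not the library API.

```python
import contextlib
import threading
import time

# Process-local registry of named locks; a sketch only, not oslo.concurrency.
_locks = {}


@contextlib.contextmanager
def timed_lock(name, caller):
    lock = _locks.setdefault(name, threading.Lock())
    wait_start = time.monotonic()
    print(f'Acquiring lock "{name}" by "{caller}"')
    with lock:
        waited = time.monotonic() - wait_start
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


# Usage, mirroring the compute_resources lock held around the resource
# tracker update in the records above:
if __name__ == "__main__":
    with timed_lock("compute_resources",
                    "nova.compute.resource_tracker.ResourceTracker.update_usage"):
        time.sleep(0.1)  # stand-in for the usage update work
```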